diff --git a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
index 926f5720ac..3074cc6923 100644
--- a/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
+++ b/accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/mr/TestHiveAccumuloTypes.java
@@ -22,8 +22,7 @@
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
-import java.sql.Date;
-import java.sql.Timestamp;
+import java.time.LocalDateTime;
import java.util.Map.Entry;
import org.apache.accumulo.core.client.BatchWriter;
@@ -39,12 +38,14 @@
import org.apache.hadoop.hive.accumulo.AccumuloHiveConstants;
import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
@@ -234,17 +235,17 @@ public void testBinaryTypes() throws Exception {
// date
baos.reset();
- Date now = new Date(System.currentTimeMillis());
- DateWritable dateWritable = new DateWritable(now);
+ Date now = Date.ofEpochMilli(System.currentTimeMillis());
+ DateWritableV2 dateWritable = new DateWritableV2(now);
Date dateValue = dateWritable.get();
dateWritable.write(out);
m.put(cfBytes, "date".getBytes(), baos.toByteArray());
// timestamp
baos.reset();
- Timestamp timestampValue = new Timestamp(now.getTime());
+ Timestamp timestampValue = new Timestamp(LocalDateTime.now());
ByteStream.Output output = new ByteStream.Output();
- TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(now.getTime()));
+ TimestampWritable timestampWritable = new TimestampWritable(new Timestamp(LocalDateTime.now()));
timestampWritable.write(new DataOutputStream(output));
output.close();
m.put(cfBytes, "timestamp".getBytes(), output.toByteArray());
@@ -587,8 +588,8 @@ public void testUtf8Types() throws Exception {
m.put(cfBytes, "decimal".getBytes(), baos.toByteArray());
// date
- Date now = new Date(System.currentTimeMillis());
- DateWritable dateWritable = new DateWritable(now);
+ Date now = Date.ofEpochMilli(System.currentTimeMillis());
+ DateWritableV2 dateWritable = new DateWritableV2(now);
Date dateValue = dateWritable.get();
baos.reset();
JavaDateObjectInspector dateOI = (JavaDateObjectInspector) PrimitiveObjectInspectorFactory
@@ -598,7 +599,7 @@ public void testUtf8Types() throws Exception {
m.put(cfBytes, "date".getBytes(), baos.toByteArray());
// timestamp
- Timestamp timestampValue = new Timestamp(now.getTime());
+ Timestamp timestampValue = new Timestamp(LocalDateTime.now());
baos.reset();
JavaTimestampObjectInspector timestampOI = (JavaTimestampObjectInspector) PrimitiveObjectInspectorFactory
.getPrimitiveJavaObjectInspector(TypeInfoFactory
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/Date.java b/common/src/java/org/apache/hadoop/hive/common/type/Date.java
new file mode 100644
index 0000000000..5d666e5d5a
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/Date.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
+import java.time.format.SignStyle;
+
+import static java.time.temporal.ChronoField.DAY_OF_MONTH;
+import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
+import static java.time.temporal.ChronoField.YEAR;
+
+/**
+ * This is the internal type for Date.
+ * The fully qualified input format of Date is "yyyy-MM-dd".
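+ *
+ * A minimal usage sketch (illustrative, using only the methods declared below):
+ *   Date d = Date.valueOf("2015-01-06");   // parses the "yyyy-MM-dd" form
+ *   int days = d.toEpochDay();             // days since 1970-01-01 (UTC)
+ *   Date same = Date.ofEpochDay(days);     // round-trips to an equal Date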
+ */
+public class Date implements Comparable<Date> {
+
+ private static final LocalDate EPOCH = LocalDate.of(1970, 1, 1);
+ private static final DateTimeFormatter PARSE_FORMATTER;
+ private static final DateTimeFormatter PRINT_FORMATTER;
+ static {
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+ builder.appendValue(YEAR, 1, 10, SignStyle.NORMAL)
+ .appendLiteral('-')
+ .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NORMAL)
+ .appendLiteral('-')
+ .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NORMAL);
+ PARSE_FORMATTER = builder.toFormatter();
+ builder = new DateTimeFormatterBuilder();
+ builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+ PRINT_FORMATTER = builder.toFormatter();
+ }
+
+ private LocalDate localDate;
+
+ public Date() {
+ this(EPOCH);
+ }
+
+ public Date(LocalDate localDate) {
+ setLocalDate(localDate);
+ }
+
+ public LocalDate getLocalDate() {
+ return localDate;
+ }
+
+ public void setLocalDate(LocalDate localDate) {
+ this.localDate = localDate != null ? localDate : EPOCH;
+ }
+
+ @Override
+ public String toString() {
+ return localDate.format(PRINT_FORMATTER);
+ }
+
+ public int hashCode() {
+ return localDate.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (other instanceof Date) {
+ return compareTo((Date) other) == 0;
+ }
+ return false;
+ }
+
+ @Override
+ public int compareTo(Date o) {
+ return localDate.compareTo(o.localDate);
+ }
+
+ public int toEpochDay() {
+ return (int) localDate.toEpochDay();
+ }
+
+ public long toEpochSecond() {
+ return localDate.atStartOfDay().toEpochSecond(ZoneOffset.UTC);
+ }
+
+ public long toEpochMilli() {
+ return localDate.atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli();
+ }
+
+ public void setYear(int year) {
+ localDate = localDate.withYear(year);
+ }
+
+ public void setMonth(int month) {
+ localDate = localDate.withMonth(month);
+ }
+
+ public void setDayOfMonth(int dayOfMonth) {
+ localDate = localDate.withDayOfMonth(dayOfMonth);
+ }
+
+ public void setTimeInDays(int epochDay) {
+ localDate = LocalDate.ofEpochDay(epochDay);
+ }
+
+ public void setTimeInMillis(long epochMilli) {
+ localDate = LocalDateTime.ofInstant(
+ Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).toLocalDate();
+ }
+
+ public static Date valueOf(String s) {
+ // Trim anything after date
+ int idx = s.indexOf(" ");
+ if (idx != -1) {
+ s = s.substring(0, idx);
+ }
+ LocalDate localDate;
+ try {
+ localDate = LocalDate.parse(s, PARSE_FORMATTER);
+ } catch (DateTimeParseException e) {
+ throw new IllegalArgumentException("Cannot create date, parsing error");
+ }
+ return new Date(localDate);
+ }
+
+ public static Date ofEpochDay(int epochDay) {
+ return new Date(LocalDate.ofEpochDay(epochDay));
+ }
+
+ public static Date ofEpochMilli(long epochMilli) {
+ return new Date(LocalDateTime.ofInstant(
+ Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC).toLocalDate());
+ }
+
+ public static Date of(int year, int month, int dayOfMonth) {
+ return new Date(LocalDate.of(year, month, dayOfMonth));
+ }
+
+ public int getYear() {
+ return localDate.getYear();
+ }
+
+ public int getMonth() {
+ return localDate.getMonthValue();
+ }
+
+ public int getDay() {
+ return localDate.getDayOfMonth();
+ }
+
+ /**
+ * Return a copy of this object.
+ */
+ public Object clone() {
+ // LocalDate is immutable.
+ return new Date(this.localDate);
+ }
+
+}
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java b/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
new file mode 100644
index 0000000000..30c2ea06cd
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/Timestamp.java
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.format.DateTimeParseException;
+import java.time.format.SignStyle;
+import java.time.temporal.ChronoField;
+
+import static java.time.temporal.ChronoField.DAY_OF_MONTH;
+import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
+import static java.time.temporal.ChronoField.YEAR;
+
+/**
+ * This is the internal type for Timestamp.
+ * The fully qualified input format of Timestamp is
+ * "yyyy-MM-dd HH:mm:ss[.SSS...]", where the time part is optional.
+ * If the time part is absent, a default of '00:00:00.0' is used.
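+ *
+ * A minimal usage sketch (illustrative, using only the methods declared below):
+ *   Timestamp ts = Timestamp.valueOf("2015-01-06 10:20:30.123456789");
+ *   long seconds = ts.toEpochSecond();     // seconds since the epoch, UTC
+ *   int nanos = ts.getNanos();             // fractional second in nanoseconds
+ *   Timestamp same = Timestamp.ofEpochSecond(seconds, nanos);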
+ */
+public class Timestamp implements Comparable<Timestamp> {
+
+ private static final LocalDateTime EPOCH = LocalDateTime.of(1970, 1, 1, 0, 0, 0);
+ private static final DateTimeFormatter PARSE_FORMATTER;
+ private static final DateTimeFormatter PRINT_FORMATTER;
+
+ static {
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+ // Date part
+ builder.appendValue(YEAR, 1, 10, SignStyle.NORMAL)
+ .appendLiteral('-')
+ .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NORMAL)
+ .appendLiteral('-')
+ .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NORMAL);
+ // Time part
+ builder
+ .optionalStart()
+ .appendLiteral(" ").append(DateTimeFormatter.ofPattern("HH:mm:ss"))
+ .optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 1, 9, true).optionalEnd()
+ .optionalEnd();
+ PARSE_FORMATTER = builder.toFormatter();
+ builder = new DateTimeFormatterBuilder();
+ // Date and time parts
+ builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
+ // Fractional part
+ builder.optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true).optionalEnd();
+ PRINT_FORMATTER = builder.toFormatter();
+ }
+
+ private LocalDateTime localDateTime;
+
+ public Timestamp() {
+ this(EPOCH);
+ }
+
+ public Timestamp(LocalDateTime localDateTime) {
+ setLocalDateTime(localDateTime);
+ }
+
+ public LocalDateTime getLocalDateTime() {
+ return localDateTime;
+ }
+
+ public void setLocalDateTime(LocalDateTime localDateTime) {
+ this.localDateTime = localDateTime != null ? localDateTime : EPOCH;
+ }
+
+ @Override
+ public String toString() {
+ return localDateTime.format(PRINT_FORMATTER);
+ }
+
+ public int hashCode() {
+ return localDateTime.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (other instanceof Timestamp) {
+ return compareTo((Timestamp) other) == 0;
+ }
+ return false;
+ }
+
+ @Override
+ public int compareTo(Timestamp o) {
+ return localDateTime.compareTo(o.localDateTime);
+ }
+
+ public long toEpochSecond() {
+ return localDateTime.toEpochSecond(ZoneOffset.UTC);
+ }
+
+ public void setTimeInSeconds(long epochSecond) {
+ setTimeInSeconds(epochSecond, 0);
+ }
+
+ public void setTimeInSeconds(long epochSecond, int nanos) {
+ localDateTime = LocalDateTime.ofEpochSecond(
+ epochSecond, nanos, ZoneOffset.UTC);
+ }
+
+ public long toEpochMilli() {
+ return localDateTime.toInstant(ZoneOffset.UTC).toEpochMilli();
+ }
+
+ public void setTimeInMillis(long epochMilli) {
+ localDateTime = LocalDateTime.ofInstant(
+ Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC);
+ }
+
+ public void setTimeInMillis(long epochMilli, int nanos) {
+ localDateTime = LocalDateTime
+ .ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC)
+ .withNano(nanos);
+ }
+
+ public int getNanos() {
+ return localDateTime.getNano();
+ }
+
+ public static Timestamp valueOf(String s) {
+ LocalDateTime localDateTime;
+ try {
+ localDateTime = LocalDateTime.parse(s, PARSE_FORMATTER);
+ } catch (DateTimeParseException e) {
+ // Try ISO-8601 format
+ try {
+ localDateTime = LocalDateTime.parse(s);
+ } catch (DateTimeParseException e2) {
+ throw new IllegalArgumentException("Cannot create timestamp, parsing error");
+ }
+ }
+ return new Timestamp(localDateTime);
+ }
+
+ public static Timestamp ofEpochSecond(long epochSecond) {
+ return ofEpochSecond(epochSecond, 0);
+ }
+
+ public static Timestamp ofEpochSecond(long epochSecond, int nanos) {
+ return new Timestamp(
+ LocalDateTime.ofEpochSecond(epochSecond, nanos, ZoneOffset.UTC));
+ }
+
+ public static Timestamp ofEpochMilli(long epochMilli) {
+ return new Timestamp(LocalDateTime
+ .ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC));
+ }
+
+ public static Timestamp ofEpochMilli(long epochMilli, int nanos) {
+ return new Timestamp(LocalDateTime
+ .ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC)
+ .withNano(nanos));
+ }
+
+ public void setNanos(int nanos) {
+ localDateTime = localDateTime.withNano(nanos);
+ }
+
+ public int getYear() {
+ return localDateTime.getYear();
+ }
+
+ public int getMonth() {
+ return localDateTime.getMonthValue();
+ }
+
+ public int getDay() {
+ return localDateTime.getDayOfMonth();
+ }
+
+ public int getHours() {
+ return localDateTime.getHour();
+ }
+
+ public int getMinutes() {
+ return localDateTime.getMinute();
+ }
+
+ public int getSeconds() {
+ return localDateTime.getSecond();
+ }
+
+ /**
+ * Return a copy of this object.
+ */
+ public Object clone() {
+ // LocalDateTime is immutable.
+ return new Timestamp(this.localDateTime);
+ }
+
+ public java.sql.Timestamp toSqlTimestamp() {
+ java.sql.Timestamp ts = new java.sql.Timestamp(toEpochMilli());
+ ts.setNanos(getNanos());
+ return ts;
+ }
+
+}
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
index 90ffddba0d..213650c2a5 100644
--- a/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
+++ b/common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hive.common.type;
-import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.DateTimeException;
@@ -31,7 +30,6 @@
import java.time.format.TextStyle;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
-import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -45,9 +43,6 @@
private static final LocalTime DEFAULT_LOCAL_TIME = LocalTime.of(0, 0);
private static final Pattern SINGLE_DIGIT_PATTERN = Pattern.compile("[\\+-]\\d:\\d\\d");
- private static final ThreadLocal<DateFormat> CONVERT_FORMATTER =
- ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
-
static final DateTimeFormatter FORMATTER;
static {
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
@@ -126,11 +121,14 @@ public static TimestampTZ parseOrNull(String s, ZoneId defaultTimeZone) {
}
}
- // Converts Date to TimestampTZ. The conversion is done text-wise since
- // Date/Timestamp should be treated as description of date/time.
+ // Converts Date to TimestampTZ.
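+ // The conversion is text-based: the Date's "yyyy-MM-dd" form is parsed in the
+ // given default time zone.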
public static TimestampTZ convert(Date date, ZoneId defaultTimeZone) {
- String s = date instanceof Timestamp ? date.toString() : CONVERT_FORMATTER.get().format(date);
- return parse(s, defaultTimeZone);
+ return parse(date.toString(), defaultTimeZone);
+ }
+
+ // Converts Timestamp to TimestampTZ.
+ public static TimestampTZ convert(Timestamp ts, ZoneId defaultTimeZone) {
+ return parse(ts.toString(), defaultTimeZone);
}
public static ZoneId parseTimeZone(String timeZoneStr) {
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java b/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java
new file mode 100644
index 0000000000..f4ab9b358b
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/TimestampUtils.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.type;
+
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+
+import java.math.BigDecimal;
+
+/**
+ * Utilities for Timestamps and the relevant conversions.
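+ *
+ * A minimal usage sketch (illustrative, using only the methods defined below):
+ *   Timestamp ts = Timestamp.ofEpochSecond(1L, 500000000);   // 1.5 seconds past the epoch
+ *   double d = TimestampUtils.getDouble(ts);                 // 1.5
+ *   Timestamp same = TimestampUtils.doubleToTimestamp(d);    // equals ts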
+ */
+public class TimestampUtils {
+ public static final BigDecimal BILLION_BIG_DECIMAL = BigDecimal.valueOf(1000000000);
+
+ /**
+ * Convert the timestamp to a double measured in seconds.
+ * @return double representation of the timestamp, accurate to nanoseconds
+ */
+ public static double getDouble(Timestamp ts) {
+ long seconds = ts.toEpochSecond();
+ return seconds + ((double) ts.getNanos()) / 1000000000;
+ }
+
+ public static Timestamp doubleToTimestamp(double f) {
+ try {
+ long seconds = (long) f;
+
+ // We must ensure the exactness of the double's fractional portion.
+ // 0.6 as the fraction part will be converted to 0.59999... and
+ // significantly reduce the savings from binary serialization
+ BigDecimal bd = new BigDecimal(String.valueOf(f));
+
+ bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
+ int nanos = bd.intValue();
+
+ return Timestamp.ofEpochSecond(seconds, nanos);
+ } catch (NumberFormatException nfe) {
+ return null;
+ } catch (IllegalArgumentException iae) {
+ return null;
+ }
+ }
+
+ /**
+ * Take a HiveDecimal and return the timestamp representation where the fraction part is the
+ * nanoseconds and integer part is the number of seconds.
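+ * For example (illustrative), a decimal value of 1.5 corresponds to 1 second and
+ * 500,000,000 nanoseconds past the epoch, i.e. 1970-01-01 00:00:01.5 UTC.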
+ * @param dec decimal value whose integer part is the seconds and whose fraction is the nanoseconds
+ * @return the corresponding Timestamp, or null if the value cannot be represented
+ */
+ public static Timestamp decimalToTimestamp(HiveDecimal dec) {
+
+ HiveDecimalWritable nanosWritable = new HiveDecimalWritable(dec);
+ nanosWritable.mutateFractionPortion(); // Clip off seconds portion.
+ nanosWritable.mutateScaleByPowerOfTen(9); // Bring nanoseconds into integer portion.
+ if (!nanosWritable.isSet() || !nanosWritable.isInt()) {
+ return null;
+ }
+ int nanos = nanosWritable.intValue();
+ if (nanos < 0) {
+ nanos += 1000000000;
+ }
+ nanosWritable.setFromLong(nanos);
+
+ HiveDecimalWritable nanoInstant = new HiveDecimalWritable(dec);
+ nanoInstant.mutateScaleByPowerOfTen(9);
+
+ nanoInstant.mutateSubtract(nanosWritable);
+ nanoInstant.mutateScaleByPowerOfTen(-9); // Back to seconds.
+ if (!nanoInstant.isSet() || !nanoInstant.isLong()) {
+ return null;
+ }
+ long seconds = nanoInstant.longValue();
+ return Timestamp.ofEpochSecond(seconds, nanos);
+ }
+
+ /**
+ * Take a HiveDecimalWritable and return the timestamp representation where the fraction part
+ * is the nanoseconds and integer part is the number of seconds.
+ *
+ * This is a HiveDecimalWritable variation with supplied scratch objects.
+ * @param decWritable decimal value whose integer part is the seconds and whose fraction is the nanoseconds
+ * @param scratchDecWritable1 scratch object reused for the nanoseconds computation
+ * @param scratchDecWritable2 scratch object reused for the seconds computation
+ * @return the corresponding Timestamp, or null if the value cannot be represented
+ */
+ public static Timestamp decimalToTimestamp(
+ HiveDecimalWritable decWritable,
+ HiveDecimalWritable scratchDecWritable1, HiveDecimalWritable scratchDecWritable2) {
+
+ HiveDecimalWritable nanosWritable = scratchDecWritable1;
+ nanosWritable.set(decWritable);
+ nanosWritable.mutateFractionPortion(); // Clip off seconds portion.
+ nanosWritable.mutateScaleByPowerOfTen(9); // Bring nanoseconds into integer portion.
+ if (!nanosWritable.isSet() || !nanosWritable.isInt()) {
+ return null;
+ }
+ int nanos = nanosWritable.intValue();
+ if (nanos < 0) {
+ nanos += 1000000000;
+ }
+ nanosWritable.setFromLong(nanos);
+
+ HiveDecimalWritable nanoInstant = scratchDecWritable2;
+ nanoInstant.set(decWritable);
+ nanoInstant.mutateScaleByPowerOfTen(9);
+
+ nanoInstant.mutateSubtract(nanosWritable);
+ nanoInstant.mutateScaleByPowerOfTen(-9); // Back to seconds.
+ if (!nanoInstant.isSet() || !nanoInstant.isLong()) {
+ return null;
+ }
+ long seconds = nanoInstant.longValue();
+
+ return Timestamp.ofEpochSecond(seconds, nanos);
+ }
+
+ public static Timestamp decimalToTimestamp(HiveDecimalV1 dec) {
+ try {
+ BigDecimal nanoInstant = dec.bigDecimalValue().multiply(BILLION_BIG_DECIMAL);
+ int nanos = nanoInstant.remainder(BILLION_BIG_DECIMAL).intValue();
+ if (nanos < 0) {
+ nanos += 1000000000;
+ }
+ long seconds =
+ nanoInstant.subtract(new BigDecimal(nanos)).divide(BILLION_BIG_DECIMAL).longValue();
+
+ return Timestamp.ofEpochSecond(seconds, nanos);
+ } catch (NumberFormatException nfe) {
+ return null;
+ } catch (IllegalArgumentException iae) {
+ return null;
+ }
+ }
+
+ /**
+ * Rounds the number of milliseconds relative to the epoch down to the nearest whole number of
+ * seconds. 500 would round to 0, -500 would round to -1.
+ */
+ public static long millisToSeconds(long millis) {
+ if (millis >= 0) {
+ return millis / 1000;
+ } else {
+ return (millis - 999) / 1000;
+ }
+ }
+
+}
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 931533a556..aa71e96990 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1881,7 +1881,7 @@ private static void populateLlapDaemonVarsSet(Set<String> llapDaemonVarsSetLocal
"Maximum fraction of heap that can be used by Parquet file writers in one task.\n" +
"It is for avoiding OutOfMemory error in tasks. Work with Parquet 1.6.0 and above.\n" +
"This config parameter is defined in Parquet, so that it does not start with 'hive.'."),
- HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION("hive.parquet.timestamp.skip.conversion", true,
+ HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION("hive.parquet.timestamp.skip.conversion", false,
"Current Hive implementation of parquet stores timestamps to UTC, this flag allows skipping of the conversion" +
"on reading parquet files from other tools"),
HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS("hive.int.timestamp.conversion.in.seconds", false,
diff --git a/common/src/java/org/apache/hive/common/util/DateParser.java b/common/src/java/org/apache/hive/common/util/DateParser.java
index 949fdbafcf..bf6695522b 100644
--- a/common/src/java/org/apache/hive/common/util/DateParser.java
+++ b/common/src/java/org/apache/hive/common/util/DateParser.java
@@ -17,24 +17,18 @@
*/
package org.apache.hive.common.util;
-import java.sql.Date;
-import java.text.ParsePosition;
-import java.text.SimpleDateFormat;
+import org.apache.hadoop.hive.common.type.Date;
/**
* Date parser class for Hive.
*/
public class DateParser {
- private final SimpleDateFormat formatter;
- private final ParsePosition pos;
+
public DateParser() {
- formatter = new SimpleDateFormat("yyyy-MM-dd");
- // TODO: ideally, we should set formatter.setLenient(false);
- pos = new ParsePosition(0);
- }
+ }
public Date parseDate(String strValue) {
- Date result = new Date(0);
+ Date result = new Date();
if (parseDate(strValue, result)) {
return result;
}
@@ -42,12 +36,11 @@ public Date parseDate(String strValue) {
}
public boolean parseDate(String strValue, Date result) {
- pos.setIndex(0);
- java.util.Date parsedVal = formatter.parse(strValue, pos);
+ Date parsedVal;
+ try {
+ parsedVal = Date.valueOf(strValue);
+ } catch (IllegalArgumentException e) {
+ parsedVal = null;
+ }
if (parsedVal == null) {
return false;
}
- result.setTime(parsedVal.getTime());
+ result.setTimeInMillis(parsedVal.toEpochMilli());
return true;
}
}
diff --git a/common/src/java/org/apache/hive/common/util/TimestampParser.java b/common/src/java/org/apache/hive/common/util/TimestampParser.java
index f674b5d30b..d30ab88892 100644
--- a/common/src/java/org/apache/hive/common/util/TimestampParser.java
+++ b/common/src/java/org/apache/hive/common/util/TimestampParser.java
@@ -18,19 +18,18 @@
package org.apache.hive.common.util;
-import java.math.BigDecimal;
-import java.sql.Timestamp;
import java.util.Arrays;
-import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.joda.time.DateTime;
-import org.joda.time.IllegalInstantException;
+import org.joda.time.LocalDateTime;
import org.joda.time.MutableDateTime;
import org.joda.time.DateTimeFieldType;
+import org.joda.time.chrono.ISOChronology;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
@@ -52,19 +51,8 @@
protected final static String[] stringArray = new String[] {};
protected final static String millisFormatString = "millis";
- @Nullable
- private final static DateTime startingDateValue = makeStartingDateValue();
-
- @Nullable
- private static DateTime makeStartingDateValue() {
- try {
- return new DateTime(1970, 1, 1, 0, 0, 0, 0);
- } catch (IllegalInstantException e) {
- // 1970-01-01 00:00:00 did not exist in some zones. In these zones, we need to take different,
- // less optimal parsing route.
- return null;
- }
- }
+ protected final static DateTime startingDateValue =
+ new DateTime(1970, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
protected String[] formatStrings = null;
protected DateTimeFormatter fmt = null;
@@ -126,21 +114,24 @@ public Timestamp parseTimestamp(String strValue) throws IllegalArgumentException
if (startingDateValue != null) {
// reset value in case any date fields are missing from the date pattern
- MutableDateTime mdt = new MutableDateTime(startingDateValue);
+ MutableDateTime mdt = new MutableDateTime(
+ startingDateValue, ISOChronology.getInstanceUTC());
// Using parseInto() avoids throwing exception when parsing,
// allowing fallback to default timestamp parsing if custom patterns fail.
int ret = fmt.parseInto(mdt, strValue, 0);
// Only accept parse results if we parsed the entire string
if (ret == strValue.length()) {
- return Optional.of(new Timestamp(mdt.getMillis()));
+ return Optional.of(Timestamp.ofEpochMilli(mdt.getMillis()));
}
return Optional.empty();
}
try {
- DateTime dt = fmt.parseDateTime(strValue);
- return Optional.of(new Timestamp(dt.getMillis()));
+ LocalDateTime dt = fmt.parseLocalDateTime(strValue);
+ return Optional.of(
+ Timestamp.ofEpochMilli(
+ dt.toDateTime(ISOChronology.getInstanceUTC().getZone()).getMillis()));
} catch (IllegalArgumentException e) {
return Optional.empty();
}
@@ -181,7 +172,8 @@ public int parseInto(DateTimeParserBucket bucket, String text, int position) {
// Joda DateTime only has precision to millis, cut off any fractional portion
long millis = Long.parseLong(matcher.group(1));
- DateTime dt = new DateTime(millis);
+ DateTime dt =
+ new DateTime(millis, ISOChronology.getInstanceUTC());
for (DateTimeFieldType field : dateTimeFields) {
bucket.saveField(field, dt.get(field));
}
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
index 72dce4deaa..1435339956 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java
@@ -17,26 +17,18 @@
*/
package org.apache.hadoop.hive.common.type;
-import java.sql.Timestamp;
import java.util.Random;
-import java.util.Arrays;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1;
-import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.orc.impl.SerializationUtils;
import org.apache.hadoop.hive.common.type.RandomTypeUtil;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.hive.ql.util.TimestampUtils;
import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
import org.junit.*;
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
index 5a3f0481bc..cd23abebfa 100644
--- a/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestTimestampTZ.java
@@ -21,7 +21,6 @@
import org.junit.Assert;
import org.junit.Test;
-import java.sql.Timestamp;
import java.time.ZoneId;
import java.time.format.DateTimeParseException;
import java.util.TimeZone;
diff --git a/common/src/test/org/apache/hive/common/util/TestDateParser.java b/common/src/test/org/apache/hive/common/util/TestDateParser.java
index 0553b3d387..3a3585124c 100644
--- a/common/src/test/org/apache/hive/common/util/TestDateParser.java
+++ b/common/src/test/org/apache/hive/common/util/TestDateParser.java
@@ -18,13 +18,13 @@
package org.apache.hive.common.util;
import static org.junit.Assert.*;
-import org.junit.Test;
-import java.sql.Date;
+import org.apache.hadoop.hive.common.type.Date;
+import org.junit.Test;
public class TestDateParser {
DateParser parser = new DateParser();
- Date date = new Date(0);
+ Date date = new Date();
void checkValidCase(String strValue, Date expected) {
Date dateValue = parser.parseDate(strValue);
diff --git a/common/src/test/org/apache/hive/common/util/TestTimestampParser.java b/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
index c982af65c6..3cc474f82b 100644
--- a/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
+++ b/common/src/test/org/apache/hive/common/util/TestTimestampParser.java
@@ -18,11 +18,10 @@
package org.apache.hive.common.util;
-import java.sql.Timestamp;
-import java.util.Arrays;
-import java.util.List;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
-import static org.junit.Assert.*;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.junit.Test;
public class TestTimestampParser {
@@ -133,10 +132,10 @@ public void testMillisParser() {
TimestampParser tp = new TimestampParser(patterns);
ValidTimestampCase[] validCases = {
- new ValidTimestampCase("0", new Timestamp(0)),
- new ValidTimestampCase("-1000000", new Timestamp(-1000000)),
- new ValidTimestampCase("1420509274123", new Timestamp(1420509274123L)),
- new ValidTimestampCase("1420509274123.456789", new Timestamp(1420509274123L)),
+ new ValidTimestampCase("0", Timestamp.ofEpochMilli(0)),
+ new ValidTimestampCase("-1000000", Timestamp.ofEpochMilli(-1000000)),
+ new ValidTimestampCase("1420509274123", Timestamp.ofEpochMilli(1420509274123L)),
+ new ValidTimestampCase("1420509274123.456789", Timestamp.ofEpochMilli(1420509274123L)),
// Other format pattern should also work
new ValidTimestampCase("1945-12-31T23:59:59",
diff --git a/data/files/alltypesorc3xcols b/data/files/alltypesorc3xcols
index e48487328b..4d1a98aaed 100644
Binary files a/data/files/alltypesorc3xcols and b/data/files/alltypesorc3xcols differ
diff --git a/data/files/orc_split_elim.orc b/data/files/orc_split_elim.orc
index cd145d3431..ff3557f249 100644
Binary files a/data/files/orc_split_elim.orc and b/data/files/orc_split_elim.orc differ
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
index 5f7657975a..221e748dfe 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidSerDe.java
@@ -19,6 +19,21 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
+import java.io.IOException;
+import java.io.InputStream;
+import java.time.Instant;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.stream.Collectors;
+
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import io.druid.query.Druids;
@@ -29,6 +44,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -81,21 +97,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.io.IOException;
-import java.io.InputStream;
-import java.sql.Timestamp;
-import java.time.Instant;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.stream.Collectors;
/**
* DruidSerDe that is used to deserialize objects from a Druid data source.
@@ -319,8 +320,7 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD
switch (types[i].getPrimitiveCategory()) {
case TIMESTAMP:
res = ((TimestampObjectInspector) fields.get(i).getFieldObjectInspector())
- .getPrimitiveJavaObject(
- values.get(i)).getTime();
+ .getPrimitiveJavaObject(values.get(i)).toEpochMilli();
break;
case TIMESTAMPLOCALTZ:
res = ((TimestampLocalTZObjectInspector) fields.get(i).getFieldObjectInspector())
@@ -374,7 +374,7 @@ public Writable serialize(Object o, ObjectInspector objectInspector) throws SerD
.equals(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME));
value.put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
((TimestampObjectInspector) fields.get(granularityFieldIndex).getFieldObjectInspector())
- .getPrimitiveJavaObject(values.get(granularityFieldIndex)).getTime()
+ .getPrimitiveJavaObject(values.get(granularityFieldIndex)).toEpochMilli()
);
if (values.size() == columns.length + 2) {
// Then partition number if any.
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
index e45de0f93f..adfa335558 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/serde/TestDruidSerDe.java
@@ -27,7 +27,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
-import java.sql.Timestamp;
import java.time.Instant;
import java.time.ZoneOffset;
import java.util.ArrayList;
@@ -35,20 +34,12 @@
import java.util.Map.Entry;
import java.util.Properties;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.google.common.util.concurrent.SettableFuture;
-import com.metamx.http.client.HttpClient;
-import com.metamx.http.client.response.HttpResponseHandler;
-import io.druid.data.input.Row;
-import io.druid.query.Result;
-import io.druid.query.select.SelectResultValue;
-import io.druid.query.timeseries.TimeseriesResultValue;
-import io.druid.query.topn.TopNResultValue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
@@ -79,17 +70,25 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
-
import org.junit.Before;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.SettableFuture;
+import com.metamx.http.client.HttpClient;
+import com.metamx.http.client.response.HttpResponseHandler;
+import io.druid.data.input.Row;
import io.druid.query.Query;
+import io.druid.query.Result;
+import io.druid.query.select.SelectResultValue;
+import io.druid.query.timeseries.TimeseriesResultValue;
+import io.druid.query.topn.TopNResultValue;
/**
* Basic tests for Druid SerDe. The examples are taken from Druid 0.9.1.1
@@ -777,7 +776,7 @@ private void deserializeQueryResults(DruidSerDe serDe, String queryType, String
new IntWritable(1112123),
new ShortWritable((short) 12),
new ByteWritable((byte) 0),
- new TimestampWritable(new Timestamp(1377907200000L)) // granularity
+ new TimestampWritable(Timestamp.ofEpochSecond(1377907200L)) // granularity
};
private static final DruidWritable DRUID_WRITABLE = new DruidWritable(
ImmutableMap.<String, Object>builder()
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
index bc4e1466f5..380577936a 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseRowSerializer.java
@@ -102,7 +102,7 @@ public Writable serialize(Object obj, ObjectInspector objInspector) throws Excep
timestamp = ((LongObjectInspector)inspector).get(value);
} else {
PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
- timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
+ timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
}
}
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
index 05cc30a621..8b73bfbeb1 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseInputFormatUtil.java
@@ -35,7 +35,6 @@
import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
import org.apache.hadoop.hive.ql.exec.ExprNodeConstantEvaluator;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
-import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
@@ -296,7 +295,7 @@ static long getTimestampVal(IndexSearchCondition sc) throws IOException {
timestamp = ((LongObjectInspector) inspector).get(value);
} else {
PrimitiveObjectInspector primitive = (PrimitiveObjectInspector) inspector;
- timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).getTime();
+ timestamp = PrimitiveObjectInspectorUtils.getTimestamp(value, primitive).toEpochMilli();
}
} catch (HiveException e) {
throw new IOException(e);
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
index d94dbe8d8a..2aeaa33b9b 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
@@ -23,6 +23,7 @@
import java.util.List;
import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -161,7 +162,8 @@ private Object uncheckedGetField(int fieldID) {
}
LazyObjectBase lz = fields[fieldID];
if (lz instanceof LazyTimestamp) {
- ((LazyTimestamp) lz).getWritableObject().setTime(timestamp);
+ ((LazyTimestamp) lz).getWritableObject().set(
+ Timestamp.ofEpochMilli(timestamp));
} else {
((LazyLong) lz).getWritableObject().set(timestamp);
}
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
index 6dcee4024b..d33d343a0e 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/DataType.java
@@ -19,12 +19,12 @@
package org.apache.hive.hcatalog.data;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
index 114c205c83..af80c02b9e 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
@@ -21,8 +21,6 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.CharacterCodingException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -34,9 +32,11 @@
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
index cb1c459afb..c27903d29a 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/ReaderWriter.java
@@ -22,7 +22,6 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.sql.Date;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Iterator;
@@ -30,10 +29,12 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -121,7 +122,7 @@ public static Object readDatum(DataInput in) throws IOException {
hdw.readFields(in);
return hdw.getHiveDecimal();
case DataType.DATE:
- DateWritable dw = new DateWritable();
+ DateWritableV2 dw = new DateWritableV2();
dw.readFields(in);
return dw.get();
case DataType.TIMESTAMP:
@@ -214,10 +215,10 @@ public static void writeDatum(DataOutput out, Object val) throws IOException {
new HiveDecimalWritable((HiveDecimal)val).write(out);
return;
case DataType.DATE:
- new DateWritable((Date)val).write(out);
+ new DateWritableV2((Date)val).write(out);
return;
case DataType.TIMESTAMP:
- new TimestampWritable((java.sql.Timestamp)val).write(out);
+ new TimestampWritable((Timestamp)val).write(out);
return;
default:
throw new IOException("Unexpected data type " + type +
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
index ec620d2fe0..98e2472bc9 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatBaseStorer.java
@@ -419,7 +419,7 @@ private Object getJavaObj(Object pigObj, HCatFieldSchema hcatFS) throws HCatExce
return new HiveVarchar(varcharVal, vti.getLength());
case TIMESTAMP:
DateTime dt = (DateTime)pigObj;
- return new Timestamp(dt.getMillis());//getMillis() returns UTC time regardless of TZ
+ return Timestamp.ofEpochMilli(dt.getMillis());//getMillis() returns UTC time regardless of TZ
case DATE:
/**
* We ignore any TZ setting on Pig value since java.sql.Date doesn't have it (in any
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
index d80b6d43fe..2ed5e5e928 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/vectorization/ColumnVectorGenUtil.java
@@ -16,11 +16,10 @@
package org.apache.hive.benchmark.vectorization;
-import java.sql.Timestamp;
import java.util.Random;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -28,12 +27,14 @@
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
public class ColumnVectorGenUtil {
private static final long LONG_VECTOR_NULL_VALUE = 1;
@@ -144,7 +145,7 @@ private static ColumnVector generateTimestampColumnVector(final boolean nulls,
final boolean repeating, final int size, final Random rand) {
Timestamp[] timestamps = new Timestamp[size];
for (int i = 0; i < size; i++) {
- timestamps[i] = new Timestamp(rand.nextInt());
+ timestamps[i] = Timestamp.ofEpochMilli(rand.nextInt());
}
return generateTimestampColumnVector(nulls, repeating, size, rand, timestamps);
}
@@ -169,10 +170,10 @@ public static TimestampColumnVector generateTimestampColumnVector(
tcv.isNull[i] = false;
if (!repeating) {
Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
- tcv.set(i, randomTimestamp);
+ tcv.set(i, randomTimestamp.toSqlTimestamp());
timestampValues[i] = randomTimestamp;
} else {
- tcv.set(i, repeatingTimestamp);
+ tcv.set(i, repeatingTimestamp.toSqlTimestamp());
timestampValues[i] = repeatingTimestamp;
}
}
diff --git a/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java b/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
index 77559e1e58..f328d6ee4d 100644
--- a/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
+++ b/llap-common/src/test/org/apache/hadoop/hive/llap/io/TestChunkedInputStream.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hive.llap.io;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
import java.io.FilterInputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
@@ -31,7 +29,7 @@
import java.util.List;
import java.util.Random;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.junit.Test;
import static org.junit.Assert.*;
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
index feccb878b7..35b749abcf 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
@@ -217,7 +217,7 @@ private void createColumnReaders(OrcEncodedColumnBatch batch,
TreeReaderFactory.Context context = new TreeReaderFactory.ReaderContext()
.setSchemaEvolution(evolution).skipCorrupt(skipCorrupt)
.writerTimeZone(stripeMetadata.getWriterTimezone())
- ;
+ .useUTCTimestamp(true);
this.batchSchemas = includes.getBatchReaderTypes(fileSchema);
StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader(
batchSchemas, stripeMetadata.getEncodings(), batch, codec, context);
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index 4033b379de..2947c167fc 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -74,8 +74,6 @@
import org.apache.hadoop.hive.llap.io.metadata.MetadataCache;
import org.apache.hadoop.hive.llap.io.metadata.MetadataCache.LlapBufferOrBuffers;
import org.apache.hadoop.hive.llap.io.metadata.OrcStripeMetadata;
-import org.apache.hadoop.hive.ql.exec.DDLTask;
-import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.HdfsUtils;
import org.apache.orc.CompressionKind;
import org.apache.orc.DataReader;
@@ -766,7 +764,8 @@ private boolean determineRgsToRead(int rowIndexStride,
if (sarg != null && rowIndexStride != 0) {
sargApp = new RecordReaderImpl.SargApplier(sarg,
rowIndexStride, evolution,
- OrcFile.WriterVersion.from(fileMetadata.getWriterVersionNum()));
+ OrcFile.WriterVersion.from(OrcFile.WriterImplementation.ORC_JAVA, fileMetadata.getWriterVersionNum()),
+ false);
}
boolean hasAnyData = false;
// stripeRgs should have been initialized by this time with an empty array.
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
index 1cfe92978a..ba2575f8a4 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/SerDeEncodedDataReader.java
@@ -563,6 +563,23 @@ public void setCurrentStripeOffsets(long currentKnownTornStart,
public CompressionCodec getCompressionCodec() {
return null;
}
+
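+ // Sums the sizes of the data buffers cached for this column, skipping suppressed
+ // streams and streams in the index area.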
+ @Override
+ public long getFileBytes(int column) {
+ long size = 0L;
+ List<CacheOutputReceiver> l = this.colStreams.get(column);
+ if (l == null) {
+ return size;
+ }
+ for (CacheOutputReceiver c : l) {
+ if (!c.suppressed && c.getName().getArea() != StreamName.Area.INDEX) {
+ for (MemoryBuffer buffer : c.getData()) {
+ size += buffer.getByteBufferRaw().limit();
+ }
+ }
+ }
+ return size;
+ }
}
private interface CacheOutput {
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
index bf139c071c..89ad4aa8cd 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
@@ -20,6 +20,7 @@
import java.util.List;
import org.apache.orc.CompressionKind;
+import org.apache.orc.FileFormatException;
import org.apache.orc.OrcProto.Type;
import org.apache.orc.TypeDescription;
@@ -27,5 +28,5 @@
int getStripeCount();
CompressionKind getCompressionKind();
List<Type> getTypes();
- TypeDescription getSchema();
+ TypeDescription getSchema() throws FileFormatException;
}
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
index 0012afb3ef..5cd6f9fa2c 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
@@ -22,7 +22,9 @@
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.orc.CompressionKind;
+import org.apache.orc.FileFormatException;
import org.apache.orc.FileMetadata;
+import org.apache.orc.OrcFile;
import org.apache.orc.OrcProto;
import org.apache.orc.OrcProto.StripeStatistics;
import org.apache.orc.OrcUtils;
@@ -123,6 +125,11 @@ public int getMetadataSize() {
return metadataSize;
}
+ @Override
+ public int getWriterImplementation() {
+ return OrcFile.WriterImplementation.ORC_JAVA.getId();
+ }
+
@Override
public int getWriterVersionNum() {
return writerVersionNum;
@@ -153,7 +160,7 @@ public int getStripeCount() {
return stripes.size();
}
- public TypeDescription getSchema() {
+ public TypeDescription getSchema() throws FileFormatException {
return OrcUtils.convertTypeFromProtobuf(this.types, 0);
}
}
diff --git a/pom.xml b/pom.xml
index 1f43c416db..cb4b746643 100644
--- a/pom.xml
+++ b/pom.xml
@@ -184,7 +184,7 @@
0.9.3
2.10.0
2.3
- <orc.version>1.4.3</orc.version>
+ <orc.version>1.5.0</orc.version>
1.10.19
2.0.0-M5
4.1.17.Final
diff --git a/ql/pom.xml b/ql/pom.xml
index 06124f7387..d52c307cff 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -943,6 +943,7 @@
org.apache.hive:hive-spark-client
org.apache.hive:hive-storage-api
org.apache.orc:orc-core
+ <include>org.apache.orc:orc-shims</include>
org.apache.orc:orc-tools
joda-time:joda-time
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
index 0d3ee2b74c..515b608b6f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
@@ -22,7 +22,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import java.sql.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DTIColumnCompareScalar.txt, which covers comparison
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
index be5f641291..183b7a5014 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
@@ -19,7 +19,7 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
import java.sql.Date;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
index 32dd6ed69f..c3982ed4d9 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
@@ -26,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
@@ -99,65 +99,65 @@ public class extends VectorExpression {
* conditional checks in the inner loop.
*/
if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
scratchIntervalYearMonth2.set((int) vector2[0]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[0] = DateWritable.dateToDays(outputDate);
+ outputVector[0] = DateWritableV2.dateToDays(outputDate);
} else if (inputColVector1.isRepeating) {
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int i = 0; i != n; i++) {
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else if (inputColVector2.isRepeating) {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int i = 0; i != n; i++) {
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int i = 0; i != n; i++) {
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
scratchDate1, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
}
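The generated date arithmetic keeps day counts in the long vectors and only widens to milliseconds for the java.sql.Date scratch objects, via DateWritableV2.daysToMillis and dateToDays. A standalone round-trip sketch (class name and sample day count are arbitrary):

import java.sql.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

class DateDaysRoundTrip {
  public static void main(String[] args) {
    int days = 17532;                                          // arbitrary day count since 1970-01-01
    Date scratchDate = new Date(0);
    scratchDate.setTime(DateWritableV2.daysToMillis(days));    // days -> millis, as the scratch objects do
    int backToDays = DateWritableV2.dateToDays(scratchDate);   // millis-backed Date -> days for the vector
    System.out.println(scratchDate + " == day " + backToDays); // round-trips to the same day count
  }
}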
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
index 94c0c5c86f..06c73687cd 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -93,10 +93,10 @@ public class extends VectorExpression {
if (inputColVector1.isRepeating) {
if (inputColVector1.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
dtm.(
scratchDate1, value, outputDate);
- outputVector[0] = DateWritable.dateToDays(outputDate);
+ outputVector[0] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[0] = true;
outputColVector.noNulls = false;
@@ -115,18 +115,18 @@ public class extends VectorExpression {
for(int j = 0; j != n; j++) {
final int i = sel[j];
outputIsNull[i] = false;
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchDate1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int j = 0; j != n; j++) {
final int i = sel[j];
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchDate1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else {
@@ -138,10 +138,10 @@ public class extends VectorExpression {
outputColVector.noNulls = true;
}
for(int i = 0; i != n; i++) {
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchDate1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else /* there are NULLs in the inputColVector */ {
@@ -155,10 +155,10 @@ public class extends VectorExpression {
int i = sel[j];
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchDate1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
@@ -168,10 +168,10 @@ public class extends VectorExpression {
for(int i = 0; i != n; i++) {
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchDate1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchDate1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
index 96c525d2b5..53637a6cd5 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateColumnArithmeticTimestampColumn.txt, a class
@@ -97,12 +97,12 @@ public class extends VectorExpression {
* conditional checks in the inner loop.
*/
if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
dtm.(
scratchTimestamp1, inputColVector2.asScratch(0), outputColVector.getScratch());
outputColVector.setFromScratch(0);
} else if (inputColVector1.isRepeating) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
@@ -122,14 +122,14 @@ public class extends VectorExpression {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -139,14 +139,14 @@ public class extends VectorExpression {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, inputColVector2.asScratch(i), outputColVector.getScratch());
outputColVector.setFromScratch(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, inputColVector2.asScratch(i), outputColVector.getScratch());
outputColVector.setFromScratch(i);
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
index fb22992657..7df1066c69 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateColumnArithmeticTimestampScalarBase.txt, a base class
@@ -91,7 +91,7 @@ public class extends VectorExpression {
if (inputColVector1.isRepeating) {
if (inputColVector1.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
dtm.(
scratchTimestamp1, value, outputColVector.getScratch());
outputColVector.setFromScratch(0);
@@ -112,7 +112,7 @@ public class extends VectorExpression {
for(int j = 0; j != n; j++) {
final int i = sel[j];
outputIsNull[i] = false;
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -120,7 +120,7 @@ public class extends VectorExpression {
} else {
for(int j = 0; j != n; j++) {
final int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -135,7 +135,7 @@ public class extends VectorExpression {
outputColVector.noNulls = true;
}
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -152,7 +152,7 @@ public class extends VectorExpression {
int i = sel[j];
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -165,7 +165,7 @@ public class extends VectorExpression {
for(int i = 0; i != n; i++) {
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.(
scratchTimestamp1, value, outputColVector.getScratch());
outputColVector.setFromScratch(i);
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
index 0c8ec9c161..7d2434ae13 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
@@ -37,8 +37,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -58,7 +58,7 @@ public class extends VectorExpression {
public (long value, int colNum, int outputColumnNum) {
super(outputColumnNum);
- this.value = new Date(DateWritable.daysToMillis((int) value));
+ this.value = new Date(DateWritableV2.daysToMillis((int) value));
this.colNum = colNum;
}
@@ -110,7 +110,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth2.set((int) vector2[0]);
dtm.(
value, scratchIntervalYearMonth2, outputDate);
- outputVector[0] = DateWritable.dateToDays(outputDate);
+ outputVector[0] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[0] = true;
outputColVector.noNulls = false;
@@ -131,7 +131,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
value, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int j = 0; j != n; j++) {
@@ -139,7 +139,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
value, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else {
@@ -154,7 +154,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
value, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else /* there are NULLs in the inputColVector */ {
@@ -171,7 +171,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
value, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
@@ -184,7 +184,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth2.set((int) vector2[i]);
dtm.(
value, scratchIntervalYearMonth2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
index ef8f2a3965..678d827a03 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateTimeScalarArithmeticTimestampColumnBase.txt.
@@ -57,7 +57,7 @@ public class extends VectorExpression {
super(outputColumnNum);
// Scalar input #1 is type date (days). For the math we convert it to a timestamp.
this.value = new Timestamp(0);
- this.value.setTime(DateWritable.daysToMillis((int) value));
+ this.value.setTime(DateWritableV2.daysToMillis((int) value));
this.colNum = colNum;
}
@@ -189,7 +189,7 @@ public class extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return "val " + value + ", " + getColumnParamString(1, colNum);
+ return "val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
}
@Override
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
index 5242bbd24f..1785abef5f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetweenDynamicValue.txt
@@ -22,7 +22,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterColumnBetween;
import org.apache.hadoop.hive.ql.plan.DynamicValue;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
index 393413f5d7..4473bf06b3 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateColumn.txt
@@ -26,8 +26,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
@@ -100,44 +100,44 @@ public class extends VectorExpression {
*/
if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
scratchIntervalYearMonth1.set((int) vector1[0]);
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[0] = DateWritable.dateToDays(outputDate);
+ outputVector[0] = DateWritableV2.dateToDays(outputDate);
} else if (inputColVector1.isRepeating) {
scratchIntervalYearMonth1.set((int) vector1[0]);
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int i = 0; i != n; i++) {
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else if (inputColVector2.isRepeating) {
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int i = 0; i != n; i++) {
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else {
@@ -145,18 +145,18 @@ public class extends VectorExpression {
for(int j = 0; j != n; j++) {
int i = sel[j];
scratchIntervalYearMonth1.set((int) vector1[i]);
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int i = 0; i != n; i++) {
scratchIntervalYearMonth1.set((int) vector1[i]);
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
scratchIntervalYearMonth1, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
}
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
index a65c4b952c..f0c1910ae5 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticDateScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -50,7 +50,7 @@ public class extends VectorExpression {
public (int colNum, long value, int outputColumnNum) {
super(outputColumnNum);
this.colNum = colNum;
- this.value = new Date(DateWritable.daysToMillis((int) value));
+ this.value = new Date(DateWritableV2.daysToMillis((int) value));
}
public () {
@@ -96,7 +96,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth1.set((int) vector1[0]);
dtm.(
scratchIntervalYearMonth1, value, outputDate);
- outputVector[0] = DateWritable.dateToDays(outputDate);
+ outputVector[0] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[0] = true;
outputColVector.noNulls = false;
@@ -118,7 +118,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int j = 0; j != n; j++) {
@@ -126,7 +126,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else {
@@ -141,7 +141,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else /* there are NULLs in the inputColVector */ {
@@ -158,7 +158,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
@@ -170,7 +170,7 @@ public class extends VectorExpression {
scratchIntervalYearMonth1.set((int) vector1[i]);
dtm.(
scratchIntervalYearMonth1, value, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
outputIsNull[i] = false;
} else {
outputIsNull[i] = true;
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
index c297116ced..c845df046d 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthColumnArithmeticTimestampScalar.txt
@@ -182,7 +182,7 @@ public class extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return getColumnParamString(0, colNum) + ", val " + value.toString();
+ return getColumnParamString(0, colNum) + ", val " + org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(value.getTime());
}
@Override
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
index 34d516e231..27f2fcf0e2 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalYearMonthScalarArithmeticDateColumn.txt
@@ -37,8 +37,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -107,10 +107,10 @@ public class extends VectorExpression {
if (inputColVector2.isRepeating) {
if (inputColVector2.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.(
value, scratchDate2, outputDate);
- outputVector[0] = DateWritable.dateToDays(outputDate);
+ outputVector[0] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[0] = true;
outputColVector.noNulls = false;
@@ -129,18 +129,18 @@ public class extends VectorExpression {
for(int j = 0; j != n; j++) {
final int i = sel[j];
outputIsNull[i] = false;
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
} else {
for(int j = 0; j != n; j++) {
final int i = sel[j];
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else {
@@ -152,10 +152,10 @@ public class extends VectorExpression {
outputColVector.noNulls = true;
}
for(int i = 0; i != n; i++) {
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
}
}
} else /* there are NULLs in the inputColVector */ {
@@ -169,10 +169,10 @@ public class extends VectorExpression {
int i = sel[j];
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
@@ -182,10 +182,10 @@ public class extends VectorExpression {
for(int i = 0; i != n; i++) {
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchDate2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchDate2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchDate2, outputDate);
- outputVector[i] = DateWritable.dateToDays(outputDate);
+ outputVector[i] = DateWritableV2.dateToDays(outputDate);
} else {
outputIsNull[i] = true;
outputColVector.noNulls = false;
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
index 45f6408c3d..8e44c92a67 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateColumn.txt
@@ -27,8 +27,8 @@ import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template TimestampColumnArithmeticDateColumn.txt, which covers binary arithmetic
@@ -96,7 +96,7 @@ public class extends VectorExpression {
* conditional checks in the inner loop.
*/
if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.(
inputColVector1.asScratch(0), scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(0);
@@ -105,21 +105,21 @@ public class extends VectorExpression {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value1, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value1, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
}
}
} else if (inputColVector2.isRepeating) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
@@ -138,14 +138,14 @@ public class extends VectorExpression {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
inputColVector1.asScratch(i), scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
inputColVector1.asScratch(i), scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
index 0bbdce756d..cb897e4ebb 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampColumnArithmeticDateScalar.txt
@@ -29,8 +29,8 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template TimestampColumnArithmeticDateScalar.txt, which covers binary arithmetic
@@ -49,7 +49,7 @@ public class extends VectorExpression {
super(outputColumnNum);
this.colNum = colNum;
this.value = new Timestamp(0);
- this.value.setTime(DateWritable.daysToMillis((int) value));
+ this.value.setTime(DateWritableV2.daysToMillis((int) value));
}
public () {
@@ -175,7 +175,7 @@ public class extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return getColumnParamString(0, colNum) + ", val " + value.toString();
+ return getColumnParamString(0, colNum) + ", val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime());
}
@Override
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
index 9a21cda21b..4ba0d65750 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticDateColumn.txt
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Generated from template TimestampScalarArithmeticDateColumnBase.txt.
@@ -104,7 +104,7 @@ public class extends VectorExpression {
if (inputColVector2.isRepeating) {
if (inputColVector2.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.(
value, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(0);
@@ -126,7 +126,7 @@ public class extends VectorExpression {
for(int j = 0; j != n; j++) {
final int i = sel[j];
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);;
@@ -134,7 +134,7 @@ public class extends VectorExpression {
} else {
for(int j = 0; j != n; j++) {
final int i = sel[j];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -149,7 +149,7 @@ public class extends VectorExpression {
outputColVector.noNulls = true;
}
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -166,7 +166,7 @@ public class extends VectorExpression {
int i = sel[j];
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
@@ -179,7 +179,7 @@ public class extends VectorExpression {
for(int i = 0; i != n; i++) {
if (!inputIsNull[i]) {
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.(
value, scratchTimestamp2, outputColVector.getScratch());
outputColVector.setFromScratch(i);
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
index dc4f5c8d4c..ea3e342a9f 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/TimestampScalarArithmeticIntervalYearMonthColumn.txt
@@ -195,7 +195,7 @@ public class extends VectorExpression {
@Override
public String vectorExpressionParameters() {
- return "val " + value.toString() + ", " + getColumnParamString(1, colNum);
+ return "val " + org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
index a53ff5aff7..61fb3d3914 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
@@ -51,7 +51,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -319,7 +319,7 @@ public String getName() {
private Date readDateValue(String dateStr) {
// try either yyyy-mm-dd, or integer representing days since epoch
try {
- DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr));
+ DateWritableV2 writableVal = new DateWritableV2(org.apache.hadoop.hive.common.type.Date.valueOf(dateStr));
return new Date(writableVal.getDays());
} catch (IllegalArgumentException err) {
// Fallback to integer parsing
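readDateValue above now goes through the Hive-owned Date type: Date.valueOf for the yyyy-mm-dd form, DateWritableV2 for the day number, and integer parsing as the fallback. The same sequence in isolation (DateStatParse and its helper are illustrative only):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

class DateStatParse {
  // Same parsing order as readDateValue: ISO date string first, then a raw day count.
  static long parseToDays(String dateStr) {
    try {
      return new DateWritableV2(Date.valueOf(dateStr)).getDays();
    } catch (IllegalArgumentException err) {
      return Long.parseLong(dateStr);   // string was already days since epoch
    }
  }

  public static void main(String[] args) {
    System.out.println(parseToDays("2018-01-01"));
    System.out.println(parseToDays("17532"));
  }
}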
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index a1f549a367..be79892ab3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -289,15 +289,15 @@
system.registerGenericUDF(UNARY_PLUS_FUNC_NAME, GenericUDFOPPositive.class);
system.registerGenericUDF(UNARY_MINUS_FUNC_NAME, GenericUDFOPNegative.class);
- system.registerUDF("day", UDFDayOfMonth.class, false);
- system.registerUDF("dayofmonth", UDFDayOfMonth.class, false);
+ system.registerGenericUDF("day", UDFDayOfMonth.class);
+ system.registerGenericUDF("dayofmonth", UDFDayOfMonth.class);
system.registerUDF("dayofweek", UDFDayOfWeek.class, false);
- system.registerUDF("month", UDFMonth.class, false);
+ system.registerGenericUDF("month", UDFMonth.class);
system.registerGenericUDF("quarter", GenericUDFQuarter.class);
- system.registerUDF("year", UDFYear.class, false);
- system.registerUDF("hour", UDFHour.class, false);
- system.registerUDF("minute", UDFMinute.class, false);
- system.registerUDF("second", UDFSecond.class, false);
+ system.registerGenericUDF("year", UDFYear.class);
+ system.registerGenericUDF("hour", UDFHour.class);
+ system.registerGenericUDF("minute", UDFMinute.class);
+ system.registerGenericUDF("second", UDFSecond.class);
system.registerUDF("from_unixtime", UDFFromUnixTime.class, false);
system.registerGenericUDF("to_date", GenericUDFDate.class);
system.registerUDF("weekofyear", UDFWeekOfYear.class, false);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
index 58252e1b7f..c90524168c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java
@@ -20,7 +20,8 @@
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -30,13 +31,19 @@
static final long NANOSECONDS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
public static long daysToNanoseconds(long daysSinceEpoch) {
- return DateWritable.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
+ return DateWritableV2.daysToMillis((int) daysSinceEpoch) * NANOSECONDS_PER_MILLISECOND;
}
public static TimestampWritable timestampColumnVectorWritable(
TimestampColumnVector timestampColVector, int elementNum,
TimestampWritable timestampWritable) {
- timestampWritable.set(timestampColVector.asScratchTimestamp(elementNum));
+ java.sql.Timestamp ts = timestampColVector.asScratchTimestamp(elementNum);
+ if (ts == null) {
+ timestampWritable.set((Timestamp) null);
+ return timestampWritable;
+ }
+ Timestamp newTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
+ timestampWritable.set(newTS);
return timestampWritable;
}
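The pattern added here — bridging the java.sql.Timestamp held by TimestampColumnVector to the serializable org.apache.hadoop.hive.common.type.Timestamp — recurs in the row-conversion classes below; passing getNanos() alongside getTime() keeps sub-millisecond precision. A minimal sketch (TimestampBridge and the sample values are illustrative):

import org.apache.hadoop.hive.common.type.Timestamp;

class TimestampBridge {
  // java.sql.Timestamp (vectorization scratch) -> Hive Timestamp, keeping the nanosecond field.
  static Timestamp toHiveTimestamp(java.sql.Timestamp ts) {
    return ts == null ? null : Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
  }

  public static void main(String[] args) {
    java.sql.Timestamp ts = new java.sql.Timestamp(1514764800000L);  // arbitrary instant
    ts.setNanos(123456789);                                          // sub-millisecond precision to preserve
    System.out.println(toHiveTimestamp(ts));
  }
}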
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
index e96619cf86..84b72af189 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAssignRow.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
@@ -38,10 +38,10 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -174,7 +174,7 @@ private void initConvertSourceEntry(int logicalColumnIndex, TypeInfo convertSour
((PrimitiveTypeInfo) targetTypeInfos[logicalColumnIndex]).getPrimitiveCategory();
switch (targetPrimitiveCategory) {
case DATE:
- convertTargetWritables[logicalColumnIndex] = new DateWritable();
+ convertTargetWritables[logicalColumnIndex] = new DateWritableV2();
break;
case STRING:
convertTargetWritables[logicalColumnIndex] = new Text();
@@ -414,19 +414,19 @@ private void assignRowColumn(
case TIMESTAMP:
if (object instanceof Timestamp) {
((TimestampColumnVector) columnVector).set(
- batchIndex, ((Timestamp) object));
+ batchIndex, ((Timestamp) object).toSqlTimestamp());
} else {
((TimestampColumnVector) columnVector).set(
- batchIndex, ((TimestampWritable) object).getTimestamp());
+ batchIndex, ((TimestampWritable) object).getTimestamp().toSqlTimestamp());
}
break;
case DATE:
if (object instanceof Date) {
((LongColumnVector) columnVector).vector[batchIndex] =
- DateWritable.dateToDays((Date) object);
+ DateWritableV2.dateToDays((Date) object);
} else {
((LongColumnVector) columnVector).vector[batchIndex] =
- ((DateWritable) object).getDays();
+ ((DateWritableV2) object).getDays();
}
break;
case FLOAT:
@@ -711,7 +711,7 @@ private void assignConvertRowColumn(ColumnVector columnVector, int batchIndex,
return;
}
((TimestampColumnVector) columnVector).set(
- batchIndex, timestamp);
+ batchIndex, timestamp.toSqlTimestamp());
}
break;
case DATE:
@@ -722,9 +722,9 @@ private void assignConvertRowColumn(ColumnVector columnVector, int batchIndex,
VectorizedBatchUtil.setNullColIsNullValue(columnVector, batchIndex);
return;
}
- DateWritable dateWritable = (DateWritable) convertTargetWritable;
+ DateWritableV2 dateWritable = (DateWritableV2) convertTargetWritable;
if (dateWritable == null) {
- dateWritable = new DateWritable();
+ dateWritable = new DateWritableV2();
}
dateWritable.set(date);
((LongColumnVector) columnVector).vector[batchIndex] =
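For DATE columns the vector still stores plain day counts, so only the overload that computes them changes. A small sketch of filling a LongColumnVector from either the new Date type or a DateWritableV2 (class name, batch size and dates are illustrative):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

class DateColumnAssign {
  public static void main(String[] args) {
    LongColumnVector dateCol = new LongColumnVector(1024);           // DATE columns hold days since epoch
    dateCol.vector[0] = DateWritableV2.dateToDays(Date.valueOf("2018-01-01"));
    dateCol.vector[1] = new DateWritableV2(Date.valueOf("2018-06-15")).getDays();
    System.out.println(dateCol.vector[0] + ", " + dateCol.vector[1]);
  }
}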
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
index 47eaf367ea..839e1e92af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorBatchDebug.java
@@ -18,9 +18,8 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import java.sql.Timestamp;
-
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -77,9 +76,9 @@ public static void debugDisplayOneRow(VectorizedRowBatch batch, int index, Strin
} else if (colVector instanceof DecimalColumnVector) {
sb.append(((DecimalColumnVector) colVector).vector[index].toString());
} else if (colVector instanceof TimestampColumnVector) {
- Timestamp timestamp = new Timestamp(0);
+ java.sql.Timestamp timestamp = new java.sql.Timestamp(0);
((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
- sb.append(timestamp.toString());
+ sb.append(Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()).toString());
} else if (colVector instanceof IntervalDayTimeColumnVector) {
HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
sb.append(intervalDayTime.toString());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
index c555464280..99f1df9975 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.ql.exec.vector;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -27,9 +26,10 @@
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -50,7 +50,6 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hive.common.util.DateUtils;
/**
* This class is used as a static factory for VectorColumnAssign.
@@ -170,10 +169,10 @@ protected void assignDecimal(HiveDecimalWritable hdw, int index) {
extends VectorColumnAssignVectorBase {
protected void assignTimestamp(Timestamp value, int index) {
- outCol.set(index, value);
+ outCol.set(index, value.toSqlTimestamp());
}
protected void assignTimestamp(TimestampWritable tw, int index) {
- outCol.set(index, tw.getTimestamp());
+ outCol.set(index, tw.getTimestamp().toSqlTimestamp());
}
}
@@ -355,7 +354,7 @@ public void assignObjectValue(Object val, int destIndex) throws HiveException {
assignNull(destIndex);
}
else {
- DateWritable bw = (DateWritable) val;
+ DateWritableV2 bw = (DateWritableV2) val;
assignLong(bw.getDays(), destIndex);
}
}
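In the opposite direction, values written into a TimestampColumnVector are converted back with toSqlTimestamp(), since the column vector still holds java.sql.Timestamp internally. A short sketch (class name and instant are illustrative):

import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

class TimestampColumnAssign {
  public static void main(String[] args) {
    TimestampColumnVector tsCol = new TimestampColumnVector(1024);
    Timestamp value = Timestamp.ofEpochMilli(1514764800000L);   // arbitrary instant
    tsCol.set(0, value.toSqlTimestamp());                       // the column vector stores java.sql.Timestamp
    System.out.println(tsCol.asScratchTimestamp(0));
  }
}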
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 8ea625e466..fb03894e72 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -31,7 +31,7 @@
import org.apache.hadoop.hive.ql.plan.VectorPartitionConversion;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -556,7 +556,7 @@ private void storePrimitiveRowColumn(ColumnVector colVector, Field field,
break;
case TIMESTAMP:
((TimestampColumnVector) colVector).set(
- batchIndex, deserializeRead.currentTimestampWritable.getTimestamp());
+ batchIndex, deserializeRead.currentTimestampWritable.getTimestamp().toSqlTimestamp());
break;
case DATE:
((LongColumnVector) colVector).vector[batchIndex] = deserializeRead.currentDateWritable.getDays();
@@ -1087,9 +1087,9 @@ private Object convertPrimitiveRowColumn(int batchIndex, Field field) throws IOE
case DATE:
{
if (writable == null) {
- writable = new DateWritable();
+ writable = new DateWritableV2();
}
- ((DateWritable) writable).set(deserializeRead.currentDateWritable);
+ ((DateWritableV2) writable).set(deserializeRead.currentDateWritable);
}
break;
case FLOAT:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
index 152d75b949..60ab75cb07 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExtractRow.java
@@ -23,10 +23,11 @@
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -36,7 +37,6 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -237,11 +237,14 @@ public Object extractRowColumn(
((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case TIMESTAMP:
- ((TimestampWritable) primitiveWritable).set(
- ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex));
+ // From java.sql.Timestamp used by vectorization to serializable org.apache.hadoop.hive.common.type.Timestamp
+ java.sql.Timestamp ts =
+ ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedIndex);
+ Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
+ ((TimestampWritable) primitiveWritable).set(serializableTS);
return primitiveWritable;
case DATE:
- ((DateWritable) primitiveWritable).set(
+ ((DateWritableV2) primitiveWritable).set(
(int) ((LongColumnVector) colVector).vector[adjustedIndex]);
return primitiveWritable;
case FLOAT:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
index 1f46f2cf19..38c31a516a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.exec.vector;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.Murmur3;
import java.sql.Date;
@@ -30,11 +31,9 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import com.google.common.base.Preconditions;
@@ -413,7 +412,7 @@ public String stringifyKeys(VectorColumnSetInfo columnSetInfo)
case DATE:
{
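+          // The long key stores the date as epoch days; convert to milliseconds for display.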
Date dt = new Date(0);
- dt.setTime(DateWritable.daysToMillis((int) longValues[i]));
+ dt.setTime(DateWritableV2.daysToMillis((int) longValues[i]));
sb.append(" date ");
sb.append(dt.toString());
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
index cb2efb7dbe..66585af577 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSerializeRow.java
@@ -22,6 +22,7 @@
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -432,7 +433,10 @@ private void serializePrimitiveWrite(
serializeWrite.writeDate((int) ((LongColumnVector) colVector).vector[adjustedBatchIndex]);
break;
case TIMESTAMP:
- serializeWrite.writeTimestamp(((TimestampColumnVector) colVector).asScratchTimestamp(adjustedBatchIndex));
+        // Convert the java.sql.Timestamp used by vectorization into the serializable org.apache.hadoop.hive.common.type.Timestamp.
+ java.sql.Timestamp ts = ((TimestampColumnVector) colVector).asScratchTimestamp(adjustedBatchIndex);
+ Timestamp serializableTS = Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos());
+ serializeWrite.writeTimestamp(serializableTS);
break;
case FLOAT:
serializeWrite.writeFloat((float) ((DoubleColumnVector) colVector).vector[adjustedBatchIndex]);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 491a6b1257..f3ab4de8ec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.exec.vector;
import java.lang.reflect.Constructor;
-import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
@@ -34,6 +33,7 @@
import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
+import org.apache.hadoop.hive.common.type.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
@@ -68,7 +68,7 @@
import org.apache.hadoop.hive.ql.udf.generic.*;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -1378,9 +1378,10 @@ private VectorExpression getConstantVectorExpression(Object constantValue, TypeI
case INT_FAMILY:
return new ConstantVectorExpression(outCol, ((Number) constantValue).longValue(), typeInfo);
case DATE:
- return new ConstantVectorExpression(outCol, DateWritable.dateToDays((Date) constantValue), typeInfo);
+ return new ConstantVectorExpression(outCol, DateWritableV2.dateToDays((Date) constantValue), typeInfo);
case TIMESTAMP:
- return new ConstantVectorExpression(outCol, (Timestamp) constantValue, typeInfo);
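+        // Constant folding now produces a Hive Timestamp; convert it to java.sql.Timestamp for the vectorized constant.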
+ return new ConstantVectorExpression(outCol,
+ ((org.apache.hadoop.hive.common.type.Timestamp) constantValue).toSqlTimestamp(), typeInfo);
case INTERVAL_YEAR_MONTH:
return new ConstantVectorExpression(outCol,
((HiveIntervalYearMonth) constantValue).getTotalMonths(), typeInfo);
@@ -2178,10 +2179,10 @@ public static InConstantType getInConstantTypeFromPrimitiveCategory(PrimitiveCat
return InConstantType.INT_FAMILY;
case DATE:
- return InConstantType.TIMESTAMP;
+ return InConstantType.DATE;
case TIMESTAMP:
- return InConstantType.DATE;
+ return InConstantType.TIMESTAMP;
case FLOAT:
case DOUBLE:
@@ -2785,6 +2786,8 @@ private VectorExpression getCastToString(List childExpr, TypeInfo
return createVectorExpression(CastDecimalToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isDateFamily(inputType)) {
return createVectorExpression(CastDateToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
+ } else if (isTimestampFamily(inputType)) {
+ return createVectorExpression(CastTimestampToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringGroupToString.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
@@ -2813,6 +2816,8 @@ private VectorExpression getCastToChar(List childExpr, TypeInfo re
return createVectorExpression(CastDecimalToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isDateFamily(inputType)) {
return createVectorExpression(CastDateToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
+ } else if (isTimestampFamily(inputType)) {
+ return createVectorExpression(CastTimestampToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringGroupToChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
@@ -2841,6 +2846,8 @@ private VectorExpression getCastToVarChar(List childExpr, TypeInfo
return createVectorExpression(CastDecimalToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isDateFamily(inputType)) {
return createVectorExpression(CastDateToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
+ } else if (isTimestampFamily(inputType)) {
+ return createVectorExpression(CastTimestampToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
} else if (isStringFamily(inputType)) {
return createVectorExpression(CastStringGroupToVarChar.class, childExpr, VectorExpressionDescriptor.Mode.PROJECTION, returnType);
}
@@ -3497,7 +3504,9 @@ private Object getVectorTypeScalarValue(ExprNodeConstantDesc constDesc) throws H
Object scalarValue = getScalarValue(constDesc);
switch (primitiveCategory) {
case DATE:
- return new Long(DateWritable.dateToDays((Date) scalarValue));
+        return Long.valueOf(DateWritableV2.dateToDays((Date) scalarValue));
+ case TIMESTAMP:
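+        // Scalar timestamp constants are Hive Timestamps; the vectorized scalar side still expects java.sql.Timestamp.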
+ return ((org.apache.hadoop.hive.common.type.Timestamp) scalarValue).toSqlTimestamp();
case INTERVAL_YEAR_MONTH:
return ((HiveIntervalYearMonth) scalarValue).getTotalMonths();
default:
@@ -3542,10 +3551,10 @@ private Timestamp evaluateCastToTimestamp(ExprNodeDesc expr) throws HiveExceptio
Object constant = evaluator.evaluate(null);
Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);
- if (!(java instanceof Timestamp)) {
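+    // copyToStandardJavaObject now returns the Hive Timestamp type for timestamp constants.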
+ if (!(java instanceof org.apache.hadoop.hive.common.type.Timestamp)) {
throw new HiveException("Udf: failed to convert to timestamp");
}
- Timestamp ts = (Timestamp) java;
+ Timestamp ts = ((org.apache.hadoop.hive.common.type.Timestamp) java).toSqlTimestamp();
return ts;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index d92ec320b5..385eb01e5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -19,29 +19,22 @@
package org.apache.hadoop.hive.ql.exec.vector;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
-import java.util.LinkedList;
import java.util.List;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -50,18 +43,14 @@
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -70,7 +59,6 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataOutputBuffer;
@@ -79,7 +67,8 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-import org.apache.hive.common.util.DateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class VectorizedBatchUtil {
private static final Logger LOG = LoggerFactory.getLogger(VectorizedBatchUtil.class);
@@ -378,7 +367,7 @@ private static void setVector(Object row,
case DATE: {
LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
- lcv.vector[rowIndex] = ((DateWritable) writableCol).getDays();
+ lcv.vector[rowIndex] = ((DateWritableV2) writableCol).getDays();
lcv.isNull[rowIndex] = false;
} else {
lcv.vector[rowIndex] = 1;
@@ -411,7 +400,7 @@ private static void setVector(Object row,
case TIMESTAMP: {
TimestampColumnVector lcv = (TimestampColumnVector) batch.cols[offset + colIndex];
if (writableCol != null) {
- lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
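+          // TimestampWritable.getTimestamp() now returns a Hive Timestamp; the column vector still stores java.sql.Timestamp.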
+ lcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp().toSqlTimestamp());
lcv.isNull[rowIndex] = false;
} else {
lcv.setNullValue(rowIndex);
@@ -744,7 +733,7 @@ public static void copyNonSelectedColumnVector(
if (sourceColVector.noNulls) {
for (int i = 0; i < size; i++) {
targetTime[i] = sourceTime[i];
- targetNanos[i] = targetNanos[i];
+ targetNanos[i] = sourceNanos[i];
}
} else {
boolean[] sourceIsNull = sourceColVector.isNull;
@@ -753,7 +742,7 @@ public static void copyNonSelectedColumnVector(
for (int i = 0; i < size; i++) {
if (!sourceIsNull[i]) {
targetTime[i] = sourceTime[i];
- targetNanos[i] = targetNanos[i];
+ targetNanos[i] = sourceNanos[i];
} else {
targetTime[i] = 0;
targetNanos[i] = 0;
@@ -895,9 +884,9 @@ public static Writable getPrimitiveWritable(PrimitiveCategory primitiveCategory)
case LONG:
return new LongWritable(0);
case TIMESTAMP:
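+        // Use the no-arg Hive Timestamp/Date constructors in place of new Timestamp(0) / new Date(0).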
- return new TimestampWritable(new Timestamp(0));
+ return new TimestampWritable(new Timestamp());
case DATE:
- return new DateWritable(new Date(0));
+ return new DateWritableV2(new Date());
case FLOAT:
return new FloatWritable(0);
case DOUBLE:
@@ -972,9 +961,9 @@ public static StringBuilder debugFormatOneRow(VectorizedRowBatch batch,
} else if (colVector instanceof DecimalColumnVector) {
sb.append(((DecimalColumnVector) colVector).vector[index].toString());
} else if (colVector instanceof TimestampColumnVector) {
- Timestamp timestamp = new Timestamp(0);
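+      // timestampUpdate still works on a java.sql.Timestamp; convert to the Hive Timestamp only for printing.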
+ java.sql.Timestamp timestamp = new java.sql.Timestamp(0);
((TimestampColumnVector) colVector).timestampUpdate(timestamp, index);
- sb.append(timestamp.toString());
+ sb.append(Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()).toString());
} else if (colVector instanceof IntervalDayTimeColumnVector) {
HiveIntervalDayTime intervalDayTime = ((IntervalDayTimeColumnVector) colVector).asScratchIntervalDayTime(index);
sb.append(intervalDayTime.toString());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index 6588385b9f..c5339a8355 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -18,45 +18,38 @@
package org.apache.hadoop.hive.ql.exec.vector;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.LinkedHashMap;
-import java.util.List;
import java.util.Map;
-import org.apache.hadoop.hive.common.type.HiveChar;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport.Support;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
-import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-import org.apache.hadoop.hive.ql.plan.Explain.Vectorization;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.FileSplit;
-import org.apache.hive.common.util.DateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.common.base.Preconditions;
@@ -482,7 +475,7 @@ public void addPartitionColsToBatch(ColumnVector[] cols, Object[] partitionValue
lcv.isNull[0] = true;
lcv.isRepeating = true;
} else {
- lcv.fill(DateWritable.dateToDays((Date) value));
+ lcv.fill(DateWritableV2.dateToDays((Date) value));
}
}
break;
@@ -494,7 +487,7 @@ public void addPartitionColsToBatch(ColumnVector[] cols, Object[] partitionValue
lcv.isNull[0] = true;
lcv.isRepeating = true;
} else {
- lcv.fill((Timestamp) value);
+ lcv.fill(((Timestamp) value).toSqlTimestamp());
}
}
break;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
index e559886659..dfa9f8a00d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
@@ -19,20 +19,27 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import java.sql.Date;
+import java.text.SimpleDateFormat;
+import java.util.TimeZone;
public class CastDateToString extends LongToStringUnaryUDF {
private static final long serialVersionUID = 1L;
protected transient Date dt = new Date(0);
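+  // Format the day value as yyyy-MM-dd with a UTC formatter rather than Date.toString(), which uses the local time zone.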
+ private transient SimpleDateFormat formatter;
public CastDateToString() {
super();
+ formatter = new SimpleDateFormat("yyyy-MM-dd");
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
}
public CastDateToString(int inputColumn, int outputColumnNum) {
super(inputColumn, outputColumnNum);
+ formatter = new SimpleDateFormat("yyyy-MM-dd");
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
}
// The assign method will be overridden for CHAR and VARCHAR.
@@ -42,8 +49,8 @@ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
@Override
protected void func(BytesColumnVector outV, long[] vector, int i) {
- dt.setTime(DateWritable.daysToMillis((int) vector[i]));
- byte[] temp = dt.toString().getBytes();
+ dt.setTime(DateWritableV2.daysToMillis((int) vector[i]));
+ byte[] temp = formatter.format(dt).getBytes();
assign(outV, i, temp, temp.length);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
index dbd7c01145..37a48e6bf6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToTimestamp.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
public class CastDateToTimestamp extends VectorExpression {
private static final long serialVersionUID = 1L;
@@ -45,7 +45,7 @@ public CastDateToTimestamp() {
}
private void setDays(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
- timestampColVector.getScratchTimestamp().setTime(DateWritable.daysToMillis((int) vector[elementNum]));
+ timestampColVector.getScratchTimestamp().setTime(DateWritableV2.daysToMillis((int) vector[elementNum]));
timestampColVector.setFromScratchTimestamp(elementNum);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
index b2185d93e4..f99bd690f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDate.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
index 6edd7b9639..a6dff12e1a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java
@@ -18,13 +18,13 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.DateParser;
import java.nio.charset.StandardCharsets;
@@ -38,7 +38,6 @@
private final int inputColumn;
- private transient final java.sql.Date sqlDate = new java.sql.Date(0);
private transient final DateParser dateParser = new DateParser();
public CastStringToDate() {
@@ -154,8 +153,9 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
private void evaluate(LongColumnVector outputColVector, BytesColumnVector inV, int i) {
String dateString = new String(inV.vector[i], inV.start[i], inV.length[i], StandardCharsets.UTF_8);
- if (dateParser.parseDate(dateString, sqlDate)) {
- outputColVector.vector[i] = DateWritable.dateToDays(sqlDate);
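+    // DateParser now parses into the Hive Date type rather than java.sql.Date.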
+ Date hDate = new Date();
+ if (dateParser.parseDate(dateString, hDate)) {
+ outputColVector.vector[i] = DateWritableV2.dateToDays(hDate);
return;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
new file mode 100644
index 0000000000..d363ad963f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToChar.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+
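+/**
+ * Vectorized expression to cast a timestamp to a CHAR value, right trimming and
+ * truncating the formatted string to the CHAR type's maximum length.
+ */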
+public class CastTimestampToChar extends CastTimestampToString implements TruncStringOutput {
+
+ private static final long serialVersionUID = 1L;
+ private int maxLength; // Must be manually set with setMaxLength.
+
+ public CastTimestampToChar() {
+ super();
+ }
+
+ public CastTimestampToChar(int inputColumn, int outputColumnNum) {
+ super(inputColumn, outputColumnNum);
+ }
+
+ @Override
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+ StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength);
+ }
+
+ @Override
+ public int getMaxLength() {
+ return maxLength;
+ }
+
+ @Override
+ public void setMaxLength(int maxLength) {
+ this.maxLength = maxLength;
+ }
+
+  @Override
+  public String vectorExpressionParameters() {
+ return getColumnParamString(0, inputColumn) + ", maxLength " + maxLength;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
index 6a41bb0796..ba7e91a178 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* To be used to cast timestamp to decimal.
@@ -40,6 +40,6 @@ public CastTimestampToDate(int inputColumn, int outputColumnNum) {
@Override
protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
- outV.vector[i] = DateWritable.millisToDays(inV.getTime(i));
+ outV.vector[i] = DateWritableV2.millisToDays(inV.getTime(i));
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
index ebe18a9540..cf0ee9f614 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
@@ -18,8 +18,10 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.util.TimestampUtils;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
/**
@@ -39,6 +41,8 @@ public CastTimestampToDecimal(int inputColumn, int outputColumnNum) {
@Override
protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
- outV.set(i, TimestampWritable.getHiveDecimal(inV.asScratchTimestamp(i)));
+ Double timestampDouble = TimestampUtils.getDouble(inV.asScratchTimestamp(i));
+    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
+    outV.set(i, result);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
new file mode 100644
index 0000000000..0e20cf1b82
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToString.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.time.temporal.ChronoField;
+
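+/**
+ * Vectorized expression to cast a timestamp to a string, formatted as
+ * "yyyy-MM-dd HH:mm:ss" with an optional nanosecond fraction, evaluated in UTC.
+ */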
+public class CastTimestampToString extends TimestampToStringUnaryUDF {
+ private static final long serialVersionUID = 1L;
+ protected transient Timestamp dt = new Timestamp(0);
+ private static final DateTimeFormatter PRINT_FORMATTER;
+
+ static {
+ DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+ // Date and time parts
+ builder.append(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"));
+ // Fractional part
+ builder.optionalStart().appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, true).optionalEnd();
+ PRINT_FORMATTER = builder.toFormatter();
+ }
+
+ public CastTimestampToString() {
+ super();
+ }
+
+ public CastTimestampToString(int inputColumn, int outputColumnNum) {
+ super(inputColumn, outputColumnNum);
+ }
+
+ // The assign method will be overridden for CHAR and VARCHAR.
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+ outV.setVal(i, bytes, 0, length);
+ }
+
+ @Override
+ protected void func(BytesColumnVector outV, TimestampColumnVector inV, int i) {
+ dt.setTime(inV.time[i]);
+ dt.setNanos(inV.nanos[i]);
+ byte[] temp = LocalDateTime.ofInstant(Instant.ofEpochMilli(inV.time[i]), ZoneOffset.UTC)
+ .withNano(inV.nanos[i])
+ .format(PRINT_FORMATTER).getBytes();
+ assign(outV, i, temp, temp.length);
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
new file mode 100644
index 0000000000..da740fad47
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToVarChar.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+
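+/**
+ * Vectorized expression to cast a timestamp to a VARCHAR value, truncating the
+ * formatted string to the VARCHAR type's maximum length.
+ */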
+public class CastTimestampToVarChar extends CastTimestampToString implements TruncStringOutput {
+
+ private static final long serialVersionUID = 1L;
+ private int maxLength; // Must be manually set with setMaxLength.
+
+ public CastTimestampToVarChar() {
+ super();
+ }
+
+ public CastTimestampToVarChar(int inputColumn, int outputColumnNum) {
+ super(inputColumn, outputColumnNum);
+ }
+
+ @Override
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+ StringExpr.truncate(outV, i, bytes, 0, length, maxLength);
+ }
+
+ @Override
+ public int getMaxLength() {
+ return maxLength;
+ }
+
+ @Override
+ public void setMaxLength(int maxLength) {
+ this.maxLength = maxLength;
+ }
+
+ @Override
+ public String vectorExpressionParameters() {
+ return getColumnParamString(0, inputColumn) + ", maxLength " + maxLength;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
index d963b87aee..79d1ad1f17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -20,14 +20,11 @@
import java.sql.Timestamp;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
// A type date (LongColumnVector storing epoch days) minus a type date produces a
// type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 longs).
@@ -96,38 +93,38 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
* conditional checks in the inner loop.
*/
if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(0);
} else if (inputColVector1.isRepeating) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
}
} else if (inputColVector2.isRepeating) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -136,15 +133,15 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
if (batch.selectedInUse) {
for(int j = 0; j != n; j++) {
int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
index 8942b78c89..a1745c8c94 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
@@ -21,15 +21,13 @@
import java.sql.Timestamp;
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
// A type date (LongColumnVector storing epoch days) minus a type date produces a
// type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
@@ -47,7 +45,7 @@ public DateColSubtractDateScalar(int colNum, long value, int outputColumnNum) {
super(outputColumnNum);
this.colNum = colNum;
this.value = new Timestamp(0);
- this.value.setTime(DateWritable.daysToMillis((int) value));
+ this.value.setTime(DateWritableV2.daysToMillis((int) value));
}
public DateColSubtractDateScalar() {
@@ -88,7 +86,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
if (inputColVector1.isRepeating) {
if (inputColVector1.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[0]));
dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(0);
@@ -111,14 +109,14 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
final int i = sel[j];
// Set isNull before call in case it changes it mind.
outputIsNull[i] = false;
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
for(int j = 0; j != n; j++) {
final int i = sel[j];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -132,7 +130,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
outputColVector.noNulls = true;
}
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -146,14 +144,14 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
for(int j = 0; j != n; j++) {
int i = sel[j];
outputIsNull[i] = inputIsNull[i];
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
for(int i = 0; i != n; i++) {
- scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+ scratchTimestamp1.setTime(DateWritableV2.daysToMillis((int) vector1[i]));
dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -165,7 +163,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
@Override
public String vectorExpressionParameters() {
- return getColumnParamString(0, colNum) + ", val " + value;
+ return getColumnParamString(0, colNum) + ", val " + Date.ofEpochMilli(value.getTime());
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
index 68b038f4fc..cedbba25dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
@@ -21,12 +21,10 @@
import java.sql.Timestamp;
import java.util.Arrays;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
// A type date (LongColumnVector storing epoch days) minus a type date produces a
// type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
@@ -44,7 +42,7 @@ public DateScalarSubtractDateColumn(long value, int colNum, int outputColumnNum)
super(outputColumnNum);
this.colNum = colNum;
this.value = new Timestamp(0);
- this.value.setTime(DateWritable.daysToMillis((int) value));
+ this.value.setTime(DateWritableV2.daysToMillis((int) value));
}
public DateScalarSubtractDateColumn() {
@@ -91,7 +89,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
if (inputColVector2.isRepeating) {
if (inputColVector2.noNulls || !inputIsNull[0]) {
outputIsNull[0] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[0]));
dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(0);
} else {
@@ -108,14 +106,14 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
for(int j = 0; j != n; j++) {
int i = sel[j];
outputIsNull[i] = false;
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
Arrays.fill(outputIsNull, 0, n, false);
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -129,14 +127,14 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
for(int j = 0; j != n; j++) {
int i = sel[j];
outputIsNull[i] = inputIsNull[i];
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
} else {
System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
for(int i = 0; i != n; i++) {
- scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+ scratchTimestamp2.setTime(DateWritableV2.daysToMillis((int) vector2[i]));
dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
outputColVector.setFromScratchIntervalDayTime(i);
}
@@ -148,7 +146,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
@Override
public String vectorExpressionParameters() {
- return "val " + value + ", " + getColumnParamString(1, colNum);
+ return "val " + org.apache.hadoop.hive.common.type.Date.ofEpochMilli(value.getTime()) + ", " + getColumnParamString(1, colNum);
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
index e150789c41..e444b68f68 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
@@ -24,7 +24,6 @@
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import java.util.Arrays;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
index 252a8163d9..eff20c948c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DynamicValueVectorExpression.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.hive.ql.exec.vector.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.DynamicValue;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -181,9 +181,9 @@ private void initValue() {
decimalValue = PrimitiveObjectInspectorUtils.getHiveDecimal(val, poi);
break;
case DATE:
- longValue = DateWritable.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
+        longValue = DateWritableV2.dateToDays(PrimitiveObjectInspectorUtils.getDate(val, poi));
+        break;
case TIMESTAMP:
- timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi);
+ timestampValue = PrimitiveObjectInspectorUtils.getTimestamp(val, poi).toSqlTimestamp();
break;
case INTERVAL_YEAR_MONTH:
longValue = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(val, poi).getTotalMonths();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
index 0d9f9f7fc5..f924e2e705 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TimestampToStringUnaryUDF.java
@@ -33,7 +33,7 @@
abstract public class TimestampToStringUnaryUDF extends VectorExpression {
private static final long serialVersionUID = 1L;
- private final int inputColumn;
+ protected final int inputColumn;
public TimestampToStringUnaryUDF(int inputColumn, int outputColumnNum) {
super(outputColumnNum);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
index 55dc461985..1a8bd5085a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
@@ -69,7 +69,6 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
/**
* VectorExpressionWritableFactory helper class for generating VectorExpressionWritable objects.
@@ -823,7 +822,7 @@ public VectorExpressionWriter init(SettableDateObjectInspector objInspector) thr
@Override
public Object writeValue(long value) {
- dt.setTime(DateWritable.daysToMillis((int) value));
+ dt.setTime(DateWritableV2.daysToMillis((int) value));
((SettableDateObjectInspector) this.objectInspector).set(obj, dt);
return obj;
}
@@ -833,7 +832,7 @@ public Object setValue(Object field, long value) {
if (null == field) {
field = initValue(null);
}
- dt.setTime(DateWritable.daysToMillis((int) value));
+ dt.setTime(DateWritableV2.daysToMillis((int) value));
((SettableDateObjectInspector) this.objectInspector).set(field, dt);
return field;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
index fa23d89ef6..c3257fa98f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColCol.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -25,15 +26,12 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.io.Text;
import org.apache.hive.common.util.DateParser;
-import java.util.Arrays;
-import java.sql.Date;
-
public class VectorUDFDateAddColCol extends VectorExpression {
private static final long serialVersionUID = 1L;
@@ -43,7 +41,6 @@
protected boolean isPositive = true;
private transient final Text text = new Text();
- private transient final Date date = new Date(0);
private transient final DateParser dateParser = new DateParser();
// Transient members initialized by transientInit method.
@@ -174,7 +171,7 @@ protected long evaluateDate(ColumnVector columnVector, int index, long numDays)
protected long evaluateTimestamp(ColumnVector columnVector, int index, long numDays) {
TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
// Convert to date value (in days)
- long days = DateWritable.millisToDays(tcv.getTime(index));
+ long days = DateWritableV2.millisToDays(tcv.getTime(index));
if (isPositive) {
days += numDays;
} else {
@@ -189,13 +186,14 @@ protected void evaluateString(BytesColumnVector inputColumnVector1, LongColumnVe
outputVector.isNull[index] = true;
} else {
text.set(inputColumnVector1.vector[index], inputColumnVector1.start[index], inputColumnVector1.length[index]);
- boolean parsed = dateParser.parseDate(text.toString(), date);
+ Date hDate = new Date();
+ boolean parsed = dateParser.parseDate(text.toString(), hDate);
if (!parsed) {
outputVector.noNulls = false;
outputVector.isNull[index] = true;
return;
}
- long days = DateWritable.millisToDays(date.getTime());
+ long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
if (isPositive) {
days += numDays;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
index a73d2e6690..30b20c87e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddColScalar.java
@@ -25,13 +25,12 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.io.Text;
import org.apache.hive.common.util.DateParser;
-import java.sql.Date;
import java.util.Arrays;
public class VectorUDFDateAddColScalar extends VectorExpression {
@@ -44,7 +43,6 @@
private transient final Text text = new Text();
private transient final DateParser dateParser = new DateParser();
- private transient final Date date = new Date(0);
// Transient members initialized by transientInit method.
private transient PrimitiveCategory primitiveCategory;
@@ -305,7 +303,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
protected long evaluateTimestamp(ColumnVector columnVector, int index) {
TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
// Convert to date value (in days)
- long days = DateWritable.millisToDays(tcv.getTime(index));
+ long days = DateWritableV2.millisToDays(tcv.getTime(index));
if (isPositive) {
days += numDays;
} else {
@@ -328,13 +326,14 @@ protected long evaluateDate(ColumnVector columnVector, int index) {
protected void evaluateString(ColumnVector columnVector, LongColumnVector outputVector, int i) {
BytesColumnVector bcv = (BytesColumnVector) columnVector;
text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
- boolean parsed = dateParser.parseDate(text.toString(), date);
+ org.apache.hadoop.hive.common.type.Date hDate = new org.apache.hadoop.hive.common.type.Date();
+ boolean parsed = dateParser.parseDate(text.toString(), hDate);
if (!parsed) {
outputVector.noNulls = false;
outputVector.isNull[i] = true;
return;
}
- long days = DateWritable.millisToDays(date.getTime());
+ long days = DateWritableV2.millisToDays(hDate.toEpochMilli());
if (isPositive) {
days += numDays;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
index 0d418fdcbf..054db661bb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateAddScalarCol.java
@@ -18,17 +18,17 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hive.common.util.DateParser;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
import java.sql.Timestamp;
import java.util.Arrays;
@@ -45,7 +45,7 @@
protected boolean isPositive = true;
private transient final DateParser dateParser = new DateParser();
- private transient final Date baseDate = new Date(0);
+ private transient final Date baseDate = new Date();
// Transient members initialized by transientInit method.
private transient PrimitiveCategory primitiveCategory;
@@ -97,11 +97,11 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
switch (primitiveCategory) {
case DATE:
- baseDate.setTime(DateWritable.daysToMillis((int) longValue));
+ baseDate.setTimeInMillis(DateWritableV2.daysToMillis((int) longValue));
break;
case TIMESTAMP:
- baseDate.setTime(timestampValue.getTime());
+ baseDate.setTimeInMillis(timestampValue.getTime());
break;
case STRING:
@@ -135,7 +135,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
// We do not need to do a column reset since we are carefully changing the output.
outputColVector.isRepeating = false;
- long baseDateDays = DateWritable.millisToDays(baseDate.getTime());
+ long baseDateDays = DateWritableV2.millisToDays(baseDate.toEpochMilli());
if (inputCol.isRepeating) {
if (inputCol.noNulls || !inputCol.isNull[0]) {
outputColVector.isNull[0] = false;
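The scalar variant keeps the same arithmetic but routes it through the mutable Hive Date (setTimeInMillis/toEpochMilli). A small round-trip sketch using only the calls shown in the hunk; the day count is arbitrary sample data.

    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    // Illustrative sketch, not part of the patch.
    public class DateAddScalarColSketch {
      public static void main(String[] args) {
        int epochDays = 17000;  // hypothetical scalar DATE value
        Date baseDate = new Date();
        baseDate.setTimeInMillis(DateWritableV2.daysToMillis(epochDays));
        // millisToDays(toEpochMilli()) recovers the original day count.
        System.out.println(DateWritableV2.millisToDays(baseDate.toEpochMilli()));  // 17000
      }
    }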
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
index 84ee94432b..ce8fc5a14a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColCol.java
@@ -20,13 +20,12 @@
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -221,7 +220,7 @@ public void copySelected(
String string = new String(input.vector[0], input.start[0], input.length[0]);
try {
date.setTime(formatter.parse(string).getTime());
- output.vector[0] = DateWritable.dateToDays(date);
+ output.vector[0] = DateWritableV2.dateToDays(date);
} catch (ParseException e) {
output.isNull[0] = true;
}
@@ -275,7 +274,7 @@ private void setDays(BytesColumnVector input, LongColumnVector output, int i) {
String string = new String(input.vector[i], input.start[i], input.length[i]);
try {
date.setTime(formatter.parse(string).getTime());
- output.vector[i] = DateWritable.dateToDays(date);
+ output.vector[i] = DateWritableV2.dateToDays(date);
} catch (ParseException e) {
output.isNull[i] = true;
output.noNulls = false;
@@ -298,7 +297,7 @@ public void copySelected(
if (!input.isNull[0]) {
date.setTime(input.getTime(0));
- output.vector[0] = DateWritable.dateToDays(date);
+ output.vector[0] = DateWritableV2.dateToDays(date);
}
return;
}
@@ -311,12 +310,12 @@ public void copySelected(
for (int j = 0; j < size; j++) {
int i = sel[j];
date.setTime(input.getTime(i));
- output.vector[i] = DateWritable.dateToDays(date);
+ output.vector[i] = DateWritableV2.dateToDays(date);
}
} else {
for (int i = 0; i < size; i++) {
date.setTime(input.getTime(i));
- output.vector[i] = DateWritable.dateToDays(date);
+ output.vector[i] = DateWritableV2.dateToDays(date);
}
}
} else {
@@ -335,14 +334,14 @@ public void copySelected(
int i = sel[j];
if (!input.isNull[i]) {
date.setTime(input.getTime(i));
- output.vector[i] = DateWritable.dateToDays(date);
+ output.vector[i] = DateWritableV2.dateToDays(date);
}
}
} else {
for (int i = 0; i < size; i++) {
if (!input.isNull[i]) {
date.setTime(input.getTime(i));
- output.vector[i] = DateWritable.dateToDays(date);
+ output.vector[i] = DateWritableV2.dateToDays(date);
}
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
index 55af413286..caedc805a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffColScalar.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -26,12 +25,11 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.io.Text;
-import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.sql.Timestamp;
import java.text.ParseException;
@@ -106,7 +104,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
case TIMESTAMP:
date.setTime(timestampValue.getTime());
- baseDate = DateWritable.dateToDays(date);
+ baseDate = DateWritableV2.dateToDays(date);
break;
case STRING:
@@ -114,7 +112,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
case VARCHAR:
try {
date.setTime(formatter.parse(new String(bytesValue, "UTF-8")).getTime());
- baseDate = DateWritable.dateToDays(date);
+ baseDate = DateWritableV2.dateToDays(date);
break;
} catch (Exception e) {
outputColVector.noNulls = false;
@@ -346,7 +344,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
protected int evaluateTimestamp(ColumnVector columnVector, int index) {
TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
date.setTime(tcv.getTime(index));
- return DateWritable.dateToDays(date) - baseDate;
+ return DateWritableV2.dateToDays(date) - baseDate;
}
protected int evaluateDate(ColumnVector columnVector, int index) {
@@ -359,7 +357,7 @@ protected void evaluateString(ColumnVector columnVector, LongColumnVector output
text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
try {
date.setTime(formatter.parse(text.toString()).getTime());
- output.vector[i] = DateWritable.dateToDays(date) - baseDate;
+ output.vector[i] = DateWritableV2.dateToDays(date) - baseDate;
} catch (ParseException e) {
output.vector[i] = 1;
output.isNull[i] = true;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
index c51d3cd51a..28addf7b80 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateDiffScalarCol.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.io.Text;
@@ -105,7 +105,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
case TIMESTAMP:
date.setTime(timestampValue.getTime());
- baseDate = DateWritable.dateToDays(date);
+ baseDate = DateWritableV2.dateToDays(date);
break;
case STRING:
@@ -113,7 +113,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
case VARCHAR:
try {
date.setTime(formatter.parse(new String(stringValue, "UTF-8")).getTime());
- baseDate = DateWritable.dateToDays(date);
+ baseDate = DateWritableV2.dateToDays(date);
break;
} catch (Exception e) {
outputColVector.noNulls = false;
@@ -346,7 +346,7 @@ public void evaluate(VectorizedRowBatch batch) throws HiveException {
protected int evaluateTimestamp(ColumnVector columnVector, int index) {
TimestampColumnVector tcv = (TimestampColumnVector) columnVector;
date.setTime(tcv.getTime(index));
- return baseDate - DateWritable.dateToDays(date);
+ return baseDate - DateWritableV2.dateToDays(date);
}
protected int evaluateDate(ColumnVector columnVector, int index) {
@@ -359,7 +359,7 @@ protected void evaluateString(ColumnVector columnVector, LongColumnVector output
text.set(bcv.vector[i], bcv.start[i], bcv.length[i]);
try {
date.setTime(formatter.parse(text.toString()).getTime());
- output.vector[i] = baseDate - DateWritable.dateToDays(date);
+ output.vector[i] = baseDate - DateWritableV2.dateToDays(date);
} catch (ParseException e) {
output.vector[i] = 1;
output.isNull[i] = true;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
index 202f5d8a6f..8e5f9dae1d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateLong.java
@@ -18,14 +18,6 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-import java.io.UnsupportedEncodingException;
-import java.sql.Date;
-import java.text.SimpleDateFormat;
-
/**
* Vectorized version of TO_DATE(TIMESTAMP)/TO_DATE(DATE).
* As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
index 8d87ef7dff..accf32c755 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateString.java
@@ -18,17 +18,6 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hive.common.util.DateParser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.sql.Date;
-
/**
* Vectorized version of TO_DATE(STRING)
* As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
index 7e7080ceed..5b497e5ddc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFDateTimestamp.java
@@ -18,15 +18,6 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-import java.io.UnsupportedEncodingException;
-import java.sql.Date;
-import java.text.SimpleDateFormat;
-
/**
* Vectorized version of TO_DATE(timestamp).
* As TO_DATE() now returns DATE type, this should be the same behavior as the DATE cast operator.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
index a1167b2e3f..837de9d0ca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
@@ -20,13 +20,14 @@
import java.util.Arrays;
import java.util.Calendar;
+import java.util.TimeZone;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hive.common.util.DateUtils;
@@ -43,7 +44,8 @@
protected final int colNum;
protected final int field;
- protected transient final Calendar calendar = Calendar.getInstance();
+ protected transient final Calendar calendar = Calendar.getInstance(
+ TimeZone.getTimeZone("UTC"));
public VectorUDFTimestampFieldDate(int field, int colNum, int outputColumnNum) {
super(outputColumnNum);
@@ -69,7 +71,7 @@ public void transientInit() throws HiveException {
}
protected long getDateField(long days) {
- calendar.setTimeInMillis(DateWritable.daysToMillis((int) days));
+ calendar.setTimeInMillis(DateWritableV2.daysToMillis((int) days));
return calendar.get(field);
}
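Pinning the Calendar to UTC matters because the V2 day-to-millis conversion is UTC-based; reading the fields back through a default-zone Calendar could shift the result by a day near midnight. A sketch of getDateField() in isolation, under that assumption about DateWritableV2.daysToMillis().

    import java.util.Calendar;
    import java.util.TimeZone;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    // Illustrative sketch, not part of the patch.
    public class DateFieldSketch {
      public static void main(String[] args) {
        Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        calendar.setTimeInMillis(DateWritableV2.daysToMillis(0));  // day 0 = 1970-01-01
        System.out.println(calendar.get(Calendar.YEAR));           // 1970
      }
    }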
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
index 3515329044..1f83eadcbb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFUnixTimeStampDate.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.hive.ql.exec.vector.expressions;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* Return Unix Timestamp.
@@ -28,7 +28,7 @@
private static final long serialVersionUID = 1L;
- private DateWritable dateWritable;
+ private DateWritableV2 dateWritable;
@Override
protected long getDateField(long days) {
@@ -39,7 +39,7 @@ protected long getDateField(long days) {
public VectorUDFUnixTimeStampDate(int colNum, int outputColumnNum) {
/* not a real field */
super(-1, colNum, outputColumnNum);
- dateWritable = new DateWritable();
+ dateWritable = new DateWritableV2();
}
public VectorUDFUnixTimeStampDate() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
index 5b2cb4c340..5b8c018f91 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
@@ -17,37 +17,17 @@
*/
package org.apache.hadoop.hive.ql.exec.vector.udf;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.Map;
-
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
-import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.SettableMapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
-import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.Text;
/**
* A VectorUDFAdaptor is a vectorized expression for invoking a custom
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
index 2b005c40fd..dda4c89fca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BatchToRowReader.java
@@ -24,6 +24,7 @@
import java.util.Arrays;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -46,7 +47,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -423,18 +423,18 @@ public static HiveDecimalWritable nextDecimal(ColumnVector vector,
}
}
- public static DateWritable nextDate(ColumnVector vector,
- int row,
- Object previous) {
+ public static DateWritableV2 nextDate(ColumnVector vector,
+ int row,
+ Object previous) {
if (vector.isRepeating) {
row = 0;
}
if (vector.noNulls || !vector.isNull[row]) {
- DateWritable result;
- if (previous == null || previous.getClass() != DateWritable.class) {
- result = new DateWritable();
+ DateWritableV2 result;
+ if (previous == null || previous.getClass() != DateWritableV2.class) {
+ result = new DateWritableV2();
} else {
- result = (DateWritable) previous;
+ result = (DateWritableV2) previous;
}
int date = (int) ((LongColumnVector) vector).vector[row];
result.set(date);
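nextDate() keeps the old reuse pattern: allocate a writable only when the previous object is missing or of the wrong class, otherwise overwrite it in place. A stripped-down sketch of that pattern; the day value is arbitrary.

    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    // Illustrative sketch, not part of the patch.
    public class NextDateSketch {
      public static void main(String[] args) {
        Object previous = null;  // whatever the caller handed back in
        DateWritableV2 result = (previous instanceof DateWritableV2)
            ? (DateWritableV2) previous : new DateWritableV2();
        result.set(1);  // days since epoch, as read from the LongColumnVector
        System.out.println(result.get());  // 1970-01-02
      }
    }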
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index 68e88cf105..612a2ffce7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -58,6 +58,7 @@ public static Reader createReader(FileSystem fs,
public static class ReaderOptions extends org.apache.orc.OrcFile.ReaderOptions {
public ReaderOptions(Configuration conf) {
super(conf);
+ useUTCTimestamp(true);
}
public ReaderOptions filesystem(FileSystem fs) {
@@ -79,6 +80,11 @@ public ReaderOptions orcTail(OrcTail orcTail) {
super.orcTail(orcTail);
return this;
}
+
+ public ReaderOptions useUTCTimestamp(boolean value) {
+ super.useUTCTimestamp(value);
+ return this;
+ }
}
public static ReaderOptions readerOptions(Configuration conf) {
@@ -104,6 +110,7 @@ public static Reader createReader(Path path,
WriterOptions(Properties tableProperties, Configuration conf) {
super(tableProperties, conf);
+ useUTCTimestamp(true);
}
/**
@@ -273,6 +280,11 @@ public WriterOptions physicalWriter(PhysicalWriter writer) {
return this;
}
+ public WriterOptions useUTCTimestamp(boolean value) {
+ super.useUTCTimestamp(value);
+ return this;
+ }
+
ObjectInspector getInspector() {
return inspector;
}
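Both option builders now enable UTC timestamps in their constructors, and the overriding useUTCTimestamp() methods keep the fluent chain returning Hive's subtypes. A usage sketch; it relies only on the methods added in this hunk.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.ql.io.orc.OrcFile;

    // Illustrative sketch, not part of the patch.
    public class OrcUtcOptionsSketch {
      public static void main(String[] args) {
        // UTC is already on by default; the explicit call just shows the fluent override.
        OrcFile.ReaderOptions opts = OrcFile.readerOptions(new Configuration())
            .useUTCTimestamp(true);
        System.out.println(opts.getClass().getSimpleName());
      }
    }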
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 049dbd38e7..de4fa8b674 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -29,7 +29,6 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
@@ -116,6 +115,7 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.Ref;
import org.apache.orc.ColumnStatistics;
+import org.apache.orc.FileFormatException;
import org.apache.orc.OrcProto;
import org.apache.orc.OrcProto.Footer;
import org.apache.orc.OrcProto.Type;
@@ -2167,7 +2167,7 @@ static Reader createOrcReaderForSplit(Configuration conf, OrcSplit orcSplit) thr
public static boolean[] pickStripesViaTranslatedSarg(SearchArgument sarg,
OrcFile.WriterVersion writerVersion, List types,
- List stripeStats, int stripeCount) {
+ List stripeStats, int stripeCount) throws FileFormatException {
LOG.info("Translated ORC pushdown predicate: " + sarg);
assert sarg != null;
if (stripeStats == null || writerVersion == OrcFile.WriterVersion.ORIGINAL) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index 5b001a0bbc..52dad88fee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -35,7 +35,7 @@
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -400,18 +400,18 @@ static HiveDecimalWritable nextDecimal(ColumnVector vector,
}
}
- static DateWritable nextDate(ColumnVector vector,
- int row,
- Object previous) {
+ static DateWritableV2 nextDate(ColumnVector vector,
+ int row,
+ Object previous) {
if (vector.isRepeating) {
row = 0;
}
if (vector.noNulls || !vector.isNull[row]) {
- DateWritable result;
- if (previous == null || previous.getClass() != DateWritable.class) {
- result = new DateWritable();
+ DateWritableV2 result;
+ if (previous == null || previous.getClass() != DateWritableV2.class) {
+ result = new DateWritableV2();
} else {
- result = (DateWritable) previous;
+ result = (DateWritableV2) previous;
}
int date = (int) ((LongColumnVector) vector).vector[row];
result.set(date);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 71682af364..7468ff530c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -19,12 +19,10 @@
package org.apache.hadoop.hive.ql.io.orc;
import java.io.IOException;
-import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -41,6 +39,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
@@ -61,9 +60,6 @@
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.orc.PhysicalWriter;
-
/**
* An ORC file writer. The file is divided into stripes, which is the natural
* unit of work when reading. Each stripe is buffered in memory until the
@@ -195,9 +191,9 @@ static void setColumn(int rowId, ColumnVector column,
}
case TIMESTAMP: {
TimestampColumnVector vector = (TimestampColumnVector) column;
- Timestamp ts = ((TimestampObjectInspector) inspector)
- .getPrimitiveJavaObject(obj);
- vector.set(rowId, ts);
+ vector.setIsUTC(true);
+ vector.set(rowId, ((TimestampObjectInspector) inspector)
+ .getPrimitiveJavaObject(obj).toSqlTimestamp());
break;
}
case DATE: {
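The TIMESTAMP branch now flags the column vector as UTC before handing it a java.sql.Timestamp obtained via toSqlTimestamp(). A minimal sketch of that pair of calls, assuming the storage-api TimestampColumnVector that carries the new UTC flag.

    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    // Illustrative sketch, not part of the patch.
    public class OrcTimestampWriteSketch {
      public static void main(String[] args) {
        TimestampColumnVector vector = new TimestampColumnVector(1);
        vector.setIsUTC(true);
        vector.set(0, Timestamp.ofEpochMilli(0L).toSqlTimestamp());
        System.out.println(vector.getTime(0));
      }
    }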
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
index 646b214249..c9078be208 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedTreeReaderFactory.java
@@ -23,14 +23,12 @@
import java.util.Arrays;
import java.util.List;
-import org.apache.curator.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch;
import org.apache.hadoop.hive.common.io.encoded.EncodedColumnBatch.ColumnStreamData;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.io.orc.encoded.Reader.OrcEncodedColumnBatch;
import org.apache.orc.CompressionCodec;
import org.apache.orc.TypeDescription;
-import org.apache.orc.TypeDescription.Category;
import org.apache.orc.impl.InStream;
import org.apache.orc.impl.PositionProvider;
import org.apache.orc.impl.SettableUncompressedStream;
@@ -1053,7 +1051,8 @@ private DecimalStreamReader(int columnId, int precision, int scale,
boolean isFileCompressed,
OrcProto.ColumnEncoding encoding, TreeReaderFactory.Context context,
List vectors) throws IOException {
- super(columnId, presentStream, valueStream, scaleStream, encoding, context);
+ super(columnId, presentStream, valueStream, scaleStream, encoding,
+ precision, scale, context);
this._isFileCompressed = isFileCompressed;
this._presentStream = presentStream;
this._valueStream = valueStream;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
index 8be8d13476..998d81a875 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
@@ -14,15 +14,15 @@
package org.apache.hadoop.hive.ql.io.parquet.convert;
import java.math.BigDecimal;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -358,13 +358,13 @@ protected TimestampWritable convert(Binary binary) {
};
}
},
- EDATE_CONVERTER(DateWritable.class) {
+ EDATE_CONVERTER(DateWritableV2.class) {
@Override
PrimitiveConverter getConverter(final PrimitiveType type, final int index, final ConverterParent parent, TypeInfo hiveTypeInfo) {
return new PrimitiveConverter() {
@Override
public void addInt(final int value) {
- parent.set(index, new DateWritable(value));
+ parent.set(index, new DateWritableV2(value));
}
};
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
index 677fb53028..bf78d8cc5b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
@@ -13,12 +13,13 @@
*/
package org.apache.hadoop.hive.ql.io.parquet.timestamp;
-import java.sql.Timestamp;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hive.common.type.Timestamp;
+
import jodd.datetime.JDateTime;
/**
@@ -58,7 +59,7 @@ public static Calendar getCalendar(boolean skipConversion) {
public static NanoTime getNanoTime(Timestamp ts, boolean skipConversion) {
Calendar calendar = getCalendar(skipConversion);
- calendar.setTime(ts);
+ calendar.setTimeInMillis(ts.toEpochMilli());
int year = calendar.get(Calendar.YEAR);
if (calendar.get(Calendar.ERA) == GregorianCalendar.BC) {
year = 1 - year;
@@ -106,8 +107,7 @@ public static Timestamp getTimestamp(NanoTime nt, boolean skipConversion) {
calendar.set(Calendar.HOUR_OF_DAY, hour);
calendar.set(Calendar.MINUTE, minutes);
calendar.set(Calendar.SECOND, seconds);
- Timestamp ts = new Timestamp(calendar.getTimeInMillis());
- ts.setNanos((int) nanos);
+ Timestamp ts = Timestamp.ofEpochMilli(calendar.getTimeInMillis(), (int) nanos);
return ts;
}
}
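getTimestamp() now builds its result in one call with Timestamp.ofEpochMilli(millis, nanos) instead of mutating a java.sql.Timestamp via setNanos(). A sketch with arbitrary sample values, assuming Hive's Timestamp interprets the millis as UTC.

    import java.util.Calendar;
    import java.util.TimeZone;
    import org.apache.hadoop.hive.common.type.Timestamp;

    // Illustrative sketch, not part of the patch.
    public class NanoTimeSketch {
      public static void main(String[] args) {
        Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
        calendar.clear();
        calendar.set(2009, Calendar.JULY, 30, 12, 58, 59);
        Timestamp ts = Timestamp.ofEpochMilli(calendar.getTimeInMillis(), 123456789);
        System.out.println(ts);  // expected: 2009-07-30 12:58:59.123456789
      }
    }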
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
index 954e29bc05..cf92360b97 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReader.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.hive.ql.io.parquet.vector;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.parquet.column.Dictionary;
import java.io.IOException;
-import java.sql.Timestamp;
/**
* The interface to wrap the underlying Parquet dictionary and non dictionary encoded page reader.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
index 0406308dcc..50f322275b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/ParquetDataColumnReaderFactory.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.io.parquet.vector;
import org.apache.hadoop.hive.common.type.HiveBaseChar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
@@ -40,7 +41,6 @@
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
-import java.sql.Timestamp;
import java.util.Arrays;
/**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
index e89a736043..e4c61562e4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
@@ -13,6 +13,7 @@
*/
package org.apache.hadoop.hive.ql.io.parquet.vector;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -387,7 +388,7 @@ private void readTimestamp(int total, TimestampColumnVector c, int rowId) throws
switch (descriptor.getType()) {
//INT64 is not yet supported
case INT96:
- c.set(rowId, dataColumn.readTimestamp());
+ c.set(rowId, dataColumn.readTimestamp().toSqlTimestamp());
break;
default:
throw new IOException(
@@ -510,7 +511,7 @@ private void decodeDictionaryIds(
case TIMESTAMP:
for (int i = rowId; i < rowId + num; ++i) {
((TimestampColumnVector) column)
- .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]));
+ .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]).toSqlTimestamp());
}
break;
case INTERVAL_DAY_TIME:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index cf1210befc..3d61c33afd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -13,12 +13,12 @@
*/
package org.apache.hadoop.hive.ql.io.parquet.write;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
@@ -46,9 +46,9 @@
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.Type;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
@@ -550,7 +550,7 @@ public DateDataWriter(DateObjectInspector inspector) {
@Override
public void write(Object value) {
Date vDate = inspector.getPrimitiveJavaObject(value);
- recordConsumer.addInteger(DateWritable.dateToDays(vDate));
+ recordConsumer.addInteger(DateWritableV2.dateToDays(vDate));
}
}
}
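DateWritableV2.dateToDays() accepts Hive's Date directly, so the Parquet writer can emit the int32 day count without a java.sql.Date detour. Sketch with an arbitrary literal.

    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    // Illustrative sketch, not part of the patch.
    public class ParquetDateWriteSketch {
      public static void main(String[] args) {
        Date vDate = Date.valueOf("1970-01-02");
        // Parquet DATE is an int32 day count since the epoch.
        System.out.println(DateWritableV2.dateToDays(vDate));  // 1
      }
    }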
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index 44687ef471..7af6dabdf5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -54,7 +54,7 @@
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo.ForeignKeyCol;
import org.apache.hadoop.hive.ql.plan.DescTableDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hive.common.util.HiveStringUtils;
import com.google.common.collect.Lists;
@@ -107,7 +107,7 @@ private static String convertToString(org.apache.hadoop.hive.metastore.api.Date
return "";
}
- DateWritable writableValue = new DateWritable((int) val.getDaysSinceEpoch());
+ DateWritableV2 writableValue = new DateWritableV2((int) val.getDaysSinceEpoch());
return writableValue.toString();
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
index 12af94e337..d950991a4c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ExprNodeConverter.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hive.ql.optimizer.calcite.translator;
import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -45,9 +43,11 @@
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;
import org.apache.calcite.util.TimestampString;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.Hive;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
index 7a482d968f..3b3dd2e253 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java
@@ -19,14 +19,11 @@
import java.math.BigDecimal;
import java.math.BigInteger;
-import java.sql.Timestamp;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Calendar;
-import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
-import java.util.Locale;
import java.util.Map;
import org.apache.calcite.avatica.util.TimeUnit;
@@ -53,12 +50,14 @@
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.NlsString;
import org.apache.calcite.util.TimestampString;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Decimal128;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -393,8 +392,6 @@ private RexNode handleExplicitCast(ExprNodeGenericFuncDesc func, List c
GenericUDF udf = func.getGenericUDF();
if ((udf instanceof GenericUDFToChar) || (udf instanceof GenericUDFToVarchar)
|| (udf instanceof GenericUDFToDecimal) || (udf instanceof GenericUDFToDate)
- // Calcite can not specify the scale for timestamp. As a result, all
- // the millisecond part will be lost
|| (udf instanceof GenericUDFTimestamp) || (udf instanceof GenericUDFToTimestampLocalTZ)
|| (udf instanceof GenericUDFToBinary) || castExprUsingUDFBridge(udf)) {
castExpr = cluster.getRexBuilder().makeAbstractCast(
@@ -676,9 +673,9 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx
calciteLiteral = rexBuilder.makeCharLiteral(asUnicodeString((String) value));
break;
case DATE:
- final Calendar cal = Calendar.getInstance(Locale.getDefault());
- cal.setTime((Date) value);
- calciteLiteral = rexBuilder.makeDateLiteral(DateString.fromCalendarFields(cal));
+ final Date date = (Date) value;
+ calciteLiteral = rexBuilder.makeDateLiteral(
+ DateString.fromDaysSinceEpoch(date.toEpochDay()));
break;
case TIMESTAMP:
final TimestampString tsString;
@@ -686,9 +683,7 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx
tsString = TimestampString.fromCalendarFields((Calendar) value);
} else {
final Timestamp ts = (Timestamp) value;
- final Calendar calt = Calendar.getInstance(Locale.getDefault());
- calt.setTimeInMillis(ts.getTime());
- tsString = TimestampString.fromCalendarFields(calt).withNanos(ts.getNanos());
+ tsString = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli()).withNanos(ts.getNanos());
}
// Must call makeLiteral, not makeTimestampLiteral
// to have the RexBuilder.roundTime logic kick in
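Building the Calcite literal strings from day counts and epoch millis removes the default-locale Calendar, which is where the old code could shift values across time zones. A sketch of just the string construction, outside any RexBuilder; Timestamp.valueOf() parsing the JDBC-style literal is an assumption, and the sample values are mine.

    import org.apache.calcite.util.DateString;
    import org.apache.calcite.util.TimestampString;
    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.common.type.Timestamp;

    // Illustrative sketch, not part of the patch.
    public class CalciteLiteralSketch {
      public static void main(String[] args) {
        Date date = Date.valueOf("2009-07-30");
        DateString ds = DateString.fromDaysSinceEpoch(date.toEpochDay());
        Timestamp ts = Timestamp.valueOf("2009-07-30 12:58:59.123");  // assumed parser
        TimestampString tss = TimestampString.fromMillisSinceEpoch(ts.toEpochMilli())
            .withNanos(ts.getNanos());
        System.out.println(ds + " / " + tss);
      }
    }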
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 596eddedf5..4625495046 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -21,7 +21,6 @@
import java.io.IOException;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
-import java.sql.Date;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -43,6 +42,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -60,7 +60,6 @@
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
-import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -96,7 +95,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentUser;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -109,9 +108,6 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
-import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
-import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
-
/**
* BaseSemanticAnalyzer.
*
@@ -2098,8 +2094,8 @@ static void normalizeColSpec(Map partSpec, String colName,
private static String normalizeDateCol(
Object colValue, String originalColSpec) throws SemanticException {
Date value;
- if (colValue instanceof DateWritable) {
- value = ((DateWritable) colValue).get(false); // Time doesn't matter.
+ if (colValue instanceof DateWritableV2) {
+ value = ((DateWritableV2) colValue).get(); // Time doesn't matter.
} else if (colValue instanceof Date) {
value = (Date) colValue;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
index 2506172ec4..d8c7d7fb1b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
@@ -19,8 +19,7 @@
package org.apache.hadoop.hive.ql.parse;
import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -33,10 +32,12 @@
import org.apache.calcite.rel.RelNode;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.ErrorMsg;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
index 607545dc7c..31c96826b0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/ColumnStatisticsObjTranslator.java
@@ -35,7 +35,7 @@
import org.apache.hadoop.hive.metastore.columnstats.cache.LongColumnStatsDataInspector;
import org.apache.hadoop.hive.metastore.columnstats.cache.StringColumnStatsDataInspector;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -197,10 +197,10 @@ private static void unpackDateStats(ObjectInspector oi, Object o, String fName,
long v = ((LongObjectInspector) oi).get(o);
statsObj.getStatsData().getDateStats().setNumDVs(v);
} else if (fName.equals("max")) {
- DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+ DateWritableV2 v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
statsObj.getStatsData().getDateStats().setHighValue(new Date(v.getDays()));
} else if (fName.equals("min")) {
- DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+ DateWritableV2 v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
statsObj.getStatsData().getDateStats().setLowValue(new Date(v.getDays()));
} else if (fName.equals("ndvbitvector")) {
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
index 21164b7013..e96fe8a57a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDateFloor.java
@@ -25,6 +25,7 @@
import java.util.Map;
import java.util.NoSuchElementException;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
@@ -59,13 +60,9 @@ public TimestampWritable evaluate(TimestampWritable t) {
if (t == null) {
return null;
}
- final long originalTimestamp = t.getTimestamp().getTime(); // default
- final long originalTimestampUTC = new DateTime(originalTimestamp)
- .withZoneRetainFields(DateTimeZone.UTC).getMillis(); // default -> utc
- final long newTimestampUTC = granularity.truncate(originalTimestampUTC); // utc
- final long newTimestamp = new DateTime(newTimestampUTC, DateTimeZone.UTC)
- .withZoneRetainFields(DateTimeZone.getDefault()).getMillis(); // utc -> default
- resultTS.setTime(newTimestamp);
+ final long originalTimestamp = t.getTimestamp().toEpochMilli();
+ final long newTimestamp = granularity.truncate(originalTimestamp);
+ resultTS.set(Timestamp.ofEpochMilli(newTimestamp));
return resultTS;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
index f7749547de..f63a726ade 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
@@ -18,23 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthDate;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
/**
* UDFDayOfMonth.
@@ -51,66 +51,78 @@
+ " > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + " 30")
@VectorizedExpressions({VectorUDFDayOfMonthDate.class, VectorUDFDayOfMonthString.class, VectorUDFDayOfMonthTimestamp.class})
@NDV(maxNdv = 31)
-public class UDFDayOfMonth extends UDF {
- private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private final Calendar calendar = Calendar.getInstance();
-
- private final IntWritable result = new IntWritable();
+public class UDFDayOfMonth extends GenericUDF {
- public UDFDayOfMonth() {
- }
+ private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+ private final IntWritable output = new IntWritable();
- /**
- * Get the day of month from a date string.
- *
- * @param dateString
- * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
- * "yyyy-MM-dd".
- * @return an int from 1 to 31. null if the dateString is not a valid date
- * string.
- */
- public IntWritable evaluate(Text dateString) {
-
- if (dateString == null) {
- return null;
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+ switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+ case INTERVAL_DAY_TIME:
+ inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+ converters[0] = ObjectInspectorConverters.getConverter(
+ arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ obtainDateConverter(arguments, 0, inputTypes, converters);
+ break;
+ default:
+ // build error message
+ StringBuilder sb = new StringBuilder();
+ sb.append(getFuncName());
+ sb.append(" does not take ");
+ sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+ sb.append(" type");
+ throw new UDFArgumentTypeException(0, sb.toString());
}
- try {
- Date date = formatter.parse(dateString.toString());
- calendar.setTime(date);
- result.set(calendar.get(Calendar.DAY_OF_MONTH));
- return result;
- } catch (ParseException e) {
- return null;
- }
+ ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ return outputOI;
}
- public IntWritable evaluate(DateWritable d) {
- if (d == null) {
- return null;
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ switch (inputTypes[0]) {
+ case INTERVAL_DAY_TIME:
+ HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+ if (intervalDayTime == null) {
+ return null;
+ }
+ output.set(intervalDayTime.getDays());
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ Date date = getDateValue(arguments, 0, inputTypes, converters);
+ if (date == null) {
+ return null;
+ }
+ output.set(date.getLocalDate().getDayOfMonth());
}
-
- calendar.setTime(d.get(false)); // Time doesn't matter.
- result.set(calendar.get(Calendar.DAY_OF_MONTH));
- return result;
+ return output;
}
- public IntWritable evaluate(TimestampWritable t) {
- if (t == null) {
- return null;
- }
-
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.DAY_OF_MONTH));
- return result;
+ @Override
+ protected String getFuncName() {
+ return "day";
}
- public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
- if (i == null) {
- return null;
- }
-
- result.set(i.getHiveIntervalDayTime().getDays());
- return result;
+ @Override
+ public String getDisplayString(String[] children) {
+ return getStandardDisplayString(getFuncName(), children);
}
}
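With UDFDayOfMonth converted to a GenericUDF, callers go through initialize()/evaluate() with object inspectors instead of the overloaded evaluate() methods. A hypothetical driver in the style of Hive's GenericUDF unit tests; the argument literal is sample data.

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;

    // Illustrative sketch, not part of the patch.
    public class DayOfMonthDriverSketch {
      public static void main(String[] args) throws HiveException {
        UDFDayOfMonth udf = new UDFDayOfMonth();
        ObjectInspector[] argOIs =
            { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
        udf.initialize(argOIs);
        DeferredObject[] arg = { new DeferredJavaObject(new Text("2009-07-30")) };
        System.out.println(udf.evaluate(arg));  // 30
      }
    }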
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
index 88e6d9466b..ea264042cb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfWeek.java
@@ -18,11 +18,7 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -30,7 +26,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfWeekTimestamp;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -51,8 +47,6 @@
@VectorizedExpressions({VectorUDFDayOfWeekDate.class, VectorUDFDayOfWeekString.class, VectorUDFDayOfWeekTimestamp.class})
@NDV(maxNdv = 7)
public class UDFDayOfWeek extends UDF {
- private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private final Calendar calendar = Calendar.getInstance();
private final IntWritable result = new IntWritable();
@@ -73,22 +67,20 @@ public IntWritable evaluate(Text dateString) {
return null;
}
try {
- Date date = formatter.parse(dateString.toString());
- calendar.setTime(date);
- result.set(calendar.get(Calendar.DAY_OF_WEEK));
+ Date date = Date.valueOf(dateString.toString());
+ result.set(date.getLocalDate().getDayOfWeek().plus(1).getValue());
return result;
- } catch (ParseException e) {
+ } catch (IllegalArgumentException e) {
return null;
}
}
- public IntWritable evaluate(DateWritable d) {
+ public IntWritable evaluate(DateWritableV2 d) {
if (d == null) {
return null;
}
- calendar.setTime(d.get(false)); // Time doesn't matter.
- result.set(calendar.get(Calendar.DAY_OF_WEEK));
+ result.set(d.get().getLocalDate().getDayOfWeek().plus(1).getValue());
return result;
}
@@ -97,8 +89,7 @@ public IntWritable evaluate(TimestampWritable t) {
return null;
}
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.DAY_OF_WEEK));
+ result.set(t.getTimestamp().getLocalDateTime().getDayOfWeek().plus(1).getValue());
return result;
}
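
For reference, a minimal standalone sketch (not part of this patch, plain JDK classes only) of why the day-of-week extraction above shifts by one day before calling getValue(): java.time numbers days Monday=1..Sunday=7, while the legacy Calendar.DAY_OF_WEEK used Sunday=1..Saturday=7.

// Standalone sketch: mapping java.time's ISO day numbering onto Calendar.DAY_OF_WEEK.
import java.time.LocalDate;
import java.util.Calendar;
import java.util.GregorianCalendar;

public class DayOfWeekMapping {
  public static void main(String[] args) {
    LocalDate d = LocalDate.of(2009, 7, 30);                 // a Thursday
    int isoValue = d.getDayOfWeek().getValue();              // 4 (Monday=1)
    int legacyValue = d.getDayOfWeek().plus(1).getValue();   // 5 (Sunday=1)

    Calendar cal = new GregorianCalendar(2009, Calendar.JULY, 30);
    System.out.println(isoValue);                            // 4
    System.out.println(legacyValue);                         // 5
    System.out.println(cal.get(Calendar.DAY_OF_WEEK));       // 5, same as legacyValue
  }
}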
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
index 8f531fdca6..3cee0c1d1c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFFromUnixTime.java
@@ -20,6 +20,7 @@
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.TimeZone;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
@@ -119,6 +120,7 @@ public Text evaluate(IntWritable unixtime, Text format) {
private Text eval(long unixtime, Text format) {
if (!format.equals(lastFormat)) {
formatter = new SimpleDateFormat(format.toString());
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
lastFormat.set(format);
}
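
For reference, a minimal standalone sketch (not part of this patch) of what pinning the formatter to UTC buys in from_unixtime-style formatting: the rendered string no longer depends on the JVM's default time zone.

// Standalone sketch: formatting epoch seconds with a SimpleDateFormat pinned to UTC.
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class FromUnixTimeSketch {
  public static void main(String[] args) {
    long unixtime = 1226446340L;                       // seconds since epoch
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
    System.out.println(fmt.format(new Date(unixtime * 1000L)));
    // prints 2008-11-11 23:32:20 regardless of the local zone
  }
}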
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
index a0c4e96a40..5e3eefe6ac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
@@ -18,22 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourDate;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
/**
* UDFHour.
@@ -51,62 +52,78 @@
+ " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 12")
@VectorizedExpressions({VectorUDFHourDate.class, VectorUDFHourString.class, VectorUDFHourTimestamp.class})
@NDV(maxNdv = 24)
-public class UDFHour extends UDF {
- private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
- private final Calendar calendar = Calendar.getInstance();
-
- private final IntWritable result = new IntWritable();
-
- public UDFHour() {
- }
+public class UDFHour extends GenericUDF {
- /**
- * Get the hour from a date string.
- *
- * @param dateString
- * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
- * "yyyy-MM-dd".
- * @return an int from 0 to 23. null if the dateString is not a valid date
- * string.
- */
- public IntWritable evaluate(Text dateString) {
+ private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+ private final IntWritable output = new IntWritable();
- if (dateString == null) {
- return null;
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+ switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+ case INTERVAL_DAY_TIME:
+ inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+ converters[0] = ObjectInspectorConverters.getConverter(
+ arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ obtainTimestampConverter(arguments, 0, inputTypes, converters);
+ break;
+ default:
+ // build error message
+ StringBuilder sb = new StringBuilder();
+ sb.append(getFuncName());
+ sb.append(" does not take ");
+ sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+ sb.append(" type");
+ throw new UDFArgumentTypeException(0, sb.toString());
}
- try {
- Date date = null;
- try {
- date = formatter1.parse(dateString.toString());
- } catch (ParseException e) {
- date = formatter2.parse(dateString.toString());
- }
- calendar.setTime(date);
- result.set(calendar.get(Calendar.HOUR_OF_DAY));
- return result;
- } catch (ParseException e) {
- return null;
- }
+ ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ return outputOI;
}
- public IntWritable evaluate(TimestampWritable t) {
- if (t == null) {
- return null;
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ switch (inputTypes[0]) {
+ case INTERVAL_DAY_TIME:
+ HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+ if (intervalDayTime == null) {
+ return null;
+ }
+ output.set(intervalDayTime.getHours());
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ Timestamp ts = getTimestampValue(arguments, 0, converters);
+ if (ts == null) {
+ return null;
+ }
+ output.set(ts.getHours());
}
-
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.HOUR_OF_DAY));
- return result;
+ return output;
}
- public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
- if (i == null) {
- return null;
- }
+ @Override
+ protected String getFuncName() {
+ return "hour";
+ }
- result.set(i.getHiveIntervalDayTime().getHours());
- return result;
+ @Override
+ public String getDisplayString(String[] children) {
+ return getStandardDisplayString(getFuncName(), children);
}
}
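
An illustrative usage sketch (not part of this patch) of the UDF-to-GenericUDF conversion above, mirroring how Hive's UDF unit tests typically drive a GenericUDF: bind the argument ObjectInspectors with initialize(), then pass values wrapped in DeferredJavaObject to evaluate(). The test scaffolding classes used here (DeferredJavaObject, PrimitiveObjectInspectorFactory) are assumed from Hive's existing serde2/udf packages.

// Usage sketch for the converted UDFHour.
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFHour;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class UDFHourUsageSketch {
  public static void main(String[] args) throws HiveException {
    UDFHour udf = new UDFHour();
    ObjectInspector[] argOIs =
        { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    udf.initialize(argOIs);                      // selects the timestamp converter for STRING

    Object result = udf.evaluate(new DeferredJavaObject[] {
        new DeferredJavaObject(new Text("2009-07-30 12:58:59")) });
    System.out.println(((IntWritable) result).get());   // expected: 12
  }
}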
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
index 306d45816c..23f09d2431 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
@@ -18,22 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteDate;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
/**
* UDFMinute.
@@ -51,62 +52,78 @@
+ " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 58")
@VectorizedExpressions({VectorUDFMinuteDate.class, VectorUDFMinuteString.class, VectorUDFMinuteTimestamp.class})
@NDV(maxNdv = 60)
-public class UDFMinute extends UDF {
- private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
- private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
- private final Calendar calendar = Calendar.getInstance();
-
- private final IntWritable result = new IntWritable();
-
- public UDFMinute() {
- }
+public class UDFMinute extends GenericUDF {
- /**
- * Get the minute from a date string.
- *
- * @param dateString
- * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
- * "yyyy-MM-dd".
- * @return an int from 0 to 59. null if the dateString is not a valid date
- * string.
- */
- public IntWritable evaluate(Text dateString) {
+ private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+ private final IntWritable output = new IntWritable();
- if (dateString == null) {
- return null;
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+ switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+ case INTERVAL_DAY_TIME:
+ inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+ converters[0] = ObjectInspectorConverters.getConverter(
+ arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ obtainTimestampConverter(arguments, 0, inputTypes, converters);
+ break;
+ default:
+ // build error message
+ StringBuilder sb = new StringBuilder();
+ sb.append(getFuncName());
+ sb.append(" does not take ");
+ sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+ sb.append(" type");
+ throw new UDFArgumentTypeException(0, sb.toString());
}
- try {
- Date date = null;
- try {
- date = formatter1.parse(dateString.toString());
- } catch (ParseException e) {
- date = formatter2.parse(dateString.toString());
- }
- calendar.setTime(date);
- result.set(calendar.get(Calendar.MINUTE));
- return result;
- } catch (ParseException e) {
- return null;
- }
+ ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ return outputOI;
}
- public IntWritable evaluate(TimestampWritable t) {
- if (t == null) {
- return null;
+ @Override
+ public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
+ switch (inputTypes[0]) {
+ case INTERVAL_DAY_TIME:
+ HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+ if (intervalDayTime == null) {
+ return null;
+ }
+ output.set(intervalDayTime.getMinutes());
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ Timestamp ts = getTimestampValue(arguments, 0, converters);
+ if (ts == null) {
+ return null;
+ }
+ output.set(ts.getMinutes());
}
-
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.MINUTE));
- return result;
+ return output;
}
- public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
- if (i == null) {
- return null;
- }
+ @Override
+ protected String getFuncName() {
+ return "minute";
+ }
- result.set(i.getHiveIntervalDayTime().getMinutes());
- return result;
+ @Override
+ public String getDisplayString(String[] children) {
+ return getStandardDisplayString(getFuncName(), children);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
index 7995934c1f..4b6f0a5c76 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
@@ -18,23 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthDate;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
/**
* UDFMonth.
@@ -51,64 +51,78 @@
+ " > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + " 7")
@VectorizedExpressions({VectorUDFMonthDate.class, VectorUDFMonthString.class, VectorUDFMonthTimestamp.class})
@NDV(maxNdv = 31)
-public class UDFMonth extends UDF {
- private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private final Calendar calendar = Calendar.getInstance();
-
- private final IntWritable result = new IntWritable();
+public class UDFMonth extends GenericUDF {
- public UDFMonth() {
- }
-
- /**
- * Get the month from a date string.
- *
- * @param dateString
- * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
- * "yyyy-MM-dd".
- * @return an int from 1 to 12. null if the dateString is not a valid date
- * string.
- */
- public IntWritable evaluate(Text dateString) {
- if (dateString == null) {
- return null;
- }
- try {
- Date date = formatter.parse(dateString.toString());
- calendar.setTime(date);
- result.set(1 + calendar.get(Calendar.MONTH));
- return result;
- } catch (ParseException e) {
- return null;
- }
- }
+ private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+ private final IntWritable output = new IntWritable();
- public IntWritable evaluate(DateWritable d) {
- if (d == null) {
- return null;
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+ switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+ case INTERVAL_YEAR_MONTH:
+ inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH;
+ converters[0] = ObjectInspectorConverters.getConverter(
+ arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ obtainDateConverter(arguments, 0, inputTypes, converters);
+ break;
+ default:
+ // build error message
+ StringBuilder sb = new StringBuilder();
+ sb.append(getFuncName());
+ sb.append(" does not take ");
+ sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+ sb.append(" type");
+ throw new UDFArgumentTypeException(0, sb.toString());
}
- calendar.setTime(d.get(false)); // Time doesn't matter.
- result.set(1 + calendar.get(Calendar.MONTH));
- return result;
+ ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ return outputOI;
}
- public IntWritable evaluate(TimestampWritable t) {
- if (t == null) {
- return null;
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ switch (inputTypes[0]) {
+ case INTERVAL_YEAR_MONTH:
+ HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters);
+ if (intervalYearMonth == null) {
+ return null;
+ }
+ output.set(intervalYearMonth.getMonths());
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ Date date = getDateValue(arguments, 0, inputTypes, converters);
+ if (date == null) {
+ return null;
+ }
+ output.set(date.getLocalDate().getMonthValue());
}
-
- calendar.setTime(t.getTimestamp());
- result.set(1 + calendar.get(Calendar.MONTH));
- return result;
+ return output;
}
- public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
- if (i == null) {
- return null;
- }
+ @Override
+ protected String getFuncName() {
+ return "month";
+ }
- result.set(i.getHiveIntervalYearMonth().getMonths());
- return result;
+ @Override
+ public String getDisplayString(String[] children) {
+ return getStandardDisplayString(getFuncName(), children);
}
}
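
For reference, a minimal standalone sketch (not part of this patch) of why the "1 +" disappears in the month extraction: Calendar.MONTH is zero-based, LocalDate.getMonthValue() is already one-based.

// Standalone sketch: month numbering in Calendar vs. java.time.
import java.time.LocalDate;
import java.util.Calendar;
import java.util.GregorianCalendar;

public class MonthValueSketch {
  public static void main(String[] args) {
    Calendar cal = new GregorianCalendar(2009, Calendar.JULY, 30);
    System.out.println(1 + cal.get(Calendar.MONTH));               // 7
    System.out.println(LocalDate.of(2009, 7, 30).getMonthValue()); // 7
  }
}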
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
index 5bf8b246af..5886a9a911 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
@@ -18,25 +18,24 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondDate;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
+
/**
* UDFSecond.
@@ -54,64 +53,78 @@
+ " > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + " 59")
@VectorizedExpressions({VectorUDFSecondDate.class, VectorUDFSecondString.class, VectorUDFSecondTimestamp.class})
@NDV(maxNdv = 60)
-public class UDFSecond extends UDF {
- private final SimpleDateFormat formatter1 = new SimpleDateFormat(
- "yyyy-MM-dd HH:mm:ss");
- private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");
- private final Calendar calendar = Calendar.getInstance();
-
- private final IntWritable result = new IntWritable();
-
- public UDFSecond() {
- }
+public class UDFSecond extends GenericUDF {
- /**
- * Get the minute from a date string.
- *
- * @param dateString
- * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
- * "yyyy-MM-dd".
- * @return an int from 0 to 59. null if the dateString is not a valid date
- * string.
- */
- public IntWritable evaluate(Text dateString) {
+ private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+ private final IntWritable output = new IntWritable();
- if (dateString == null) {
- return null;
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+ switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+ case INTERVAL_DAY_TIME:
+ inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_DAY_TIME;
+ converters[0] = ObjectInspectorConverters.getConverter(
+ arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ obtainTimestampConverter(arguments, 0, inputTypes, converters);
+ break;
+ default:
+ // build error message
+ StringBuilder sb = new StringBuilder();
+ sb.append(getFuncName());
+ sb.append(" does not take ");
+ sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+ sb.append(" type");
+ throw new UDFArgumentTypeException(0, sb.toString());
}
- try {
- Date date = null;
- try {
- date = formatter1.parse(dateString.toString());
- } catch (ParseException e) {
- date = formatter2.parse(dateString.toString());
- }
- calendar.setTime(date);
- result.set(calendar.get(Calendar.SECOND));
- return result;
- } catch (ParseException e) {
- return null;
- }
+ ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ return outputOI;
}
- public IntWritable evaluate(TimestampWritable t) {
- if (t == null) {
- return null;
+ @Override
+ public Object evaluate(GenericUDF.DeferredObject[] arguments) throws HiveException {
+ switch (inputTypes[0]) {
+ case INTERVAL_DAY_TIME:
+ HiveIntervalDayTime intervalDayTime = getIntervalDayTimeValue(arguments, 0, inputTypes, converters);
+ if (intervalDayTime == null) {
+ return null;
+ }
+ output.set(intervalDayTime.getSeconds());
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ Timestamp ts = getTimestampValue(arguments, 0, converters);
+ if (ts == null) {
+ return null;
+ }
+ output.set(ts.getSeconds());
}
-
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.SECOND));
- return result;
+ return output;
}
- public IntWritable evaluate(HiveIntervalDayTimeWritable i) {
- if (i == null) {
- return null;
- }
+ @Override
+ protected String getFuncName() {
+ return "second";
+ }
- HiveIntervalDayTime idt = i.getHiveIntervalDayTime();
- result.set(idt.getSeconds());
- return result;
+ @Override
+ public String getDisplayString(String[] children) {
+ return getStandardDisplayString(getFuncName(), children);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index d7d8bccb7c..03da478540 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -30,7 +30,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDateToBooleanViaLongToLong;
import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToBoolean;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -179,7 +179,7 @@ public BooleanWritable evaluate(Text i) {
return booleanWritable;
}
- public BooleanWritable evaluate(DateWritable d) {
+ public BooleanWritable evaluate(DateWritableV2 d) {
// date value to boolean doesn't make any sense.
return null;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
index 557cb1c6fb..4b5567affc 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
@@ -21,7 +21,7 @@
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -135,7 +135,7 @@ public Text evaluate(Text i) {
return i;
}
- public Text evaluate(DateWritable d) {
+ public Text evaluate(DateWritableV2 d) {
if (d == null) {
return null;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
index 18ed52dc1f..1b33ac4c40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
@@ -18,11 +18,11 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
+import java.time.DayOfWeek;
+import java.time.temporal.IsoFields;
+import java.time.temporal.WeekFields;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
@@ -30,7 +30,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearTimestamp;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -49,14 +49,13 @@
@VectorizedExpressions({VectorUDFWeekOfYearDate.class, VectorUDFWeekOfYearString.class, VectorUDFWeekOfYearTimestamp.class})
@NDV(maxNdv = 52)
public class UDFWeekOfYear extends UDF {
- private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private final Calendar calendar = Calendar.getInstance();
+
+ private final WeekFields weekFields;
private final IntWritable result = new IntWritable();
public UDFWeekOfYear() {
- calendar.setFirstDayOfWeek(Calendar.MONDAY);
- calendar.setMinimalDaysInFirstWeek(4);
+ weekFields = WeekFields.of(DayOfWeek.MONDAY, 4);
}
/**
@@ -73,22 +72,20 @@ public IntWritable evaluate(Text dateString) {
return null;
}
try {
- Date date = formatter.parse(dateString.toString());
- calendar.setTime(date);
- result.set(calendar.get(Calendar.WEEK_OF_YEAR));
+ Date date = Date.valueOf(dateString.toString());
+ result.set(date.getLocalDate().get(weekFields.weekOfWeekBasedYear()));
return result;
- } catch (ParseException e) {
+ } catch (IllegalArgumentException e) {
return null;
}
}
- public IntWritable evaluate(DateWritable d) {
+ public IntWritable evaluate(DateWritableV2 d) {
if (d == null) {
return null;
}
- calendar.setTime(d.get(false)); // Time doesn't matter.
- result.set(calendar.get(Calendar.WEEK_OF_YEAR));
+ result.set(d.get().getLocalDate().get(weekFields.weekOfWeekBasedYear()));
return result;
}
@@ -97,8 +94,7 @@ public IntWritable evaluate(TimestampWritable t) {
return null;
}
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.WEEK_OF_YEAR));
+ result.set(t.getTimestamp().getLocalDateTime().get(IsoFields.WEEK_OF_WEEK_BASED_YEAR));
return result;
}
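
For reference, a minimal standalone sketch (not part of this patch) of why WeekFields.of(DayOfWeek.MONDAY, 4) reproduces the old Calendar setup (firstDayOfWeek=MONDAY, minimalDaysInFirstWeek=4): it is exactly the ISO-8601 week definition, so it also agrees with IsoFields used in the timestamp branch.

// Standalone sketch: ISO week-of-year via WeekFields and IsoFields.
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.temporal.IsoFields;
import java.time.temporal.WeekFields;

public class WeekOfYearSketch {
  public static void main(String[] args) {
    WeekFields wf = WeekFields.of(DayOfWeek.MONDAY, 4);
    LocalDate d = LocalDate.of(2016, 1, 1);    // a Friday, ISO week 53 of 2015
    System.out.println(d.get(wf.weekOfWeekBasedYear()));           // 53
    System.out.println(d.get(IsoFields.WEEK_OF_WEEK_BASED_YEAR));  // 53
  }
}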
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
index 84175913f3..1873b855ed 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
@@ -18,23 +18,23 @@
package org.apache.hadoop.hive.ql.udf;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.NDV;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
/**
* UDFYear.
@@ -51,66 +51,78 @@
+ " > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + " 2009")
@VectorizedExpressions({VectorUDFYearDate.class, VectorUDFYearString.class, VectorUDFYearTimestamp.class})
@NDV(maxNdv = 20) // although technically its unbounded, its unlikely we will ever see ndv > 20
-public class UDFYear extends UDF {
- private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private final Calendar calendar = Calendar.getInstance();
-
- private final IntWritable result = new IntWritable();
+public class UDFYear extends GenericUDF {
- public UDFYear() {
- }
+ private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+ private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
+ private final IntWritable output = new IntWritable();
- /**
- * Get the year from a date string.
- *
- * @param dateString
- * the dateString in the format of "yyyy-MM-dd HH:mm:ss" or
- * "yyyy-MM-dd".
- * @return an int from 1 to 12. null if the dateString is not a valid date
- * string.
- */
- public IntWritable evaluate(Text dateString) {
-
- if (dateString == null) {
- return null;
+ @Override
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+ switch (((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory()) {
+ case INTERVAL_YEAR_MONTH:
+ inputTypes[0] = PrimitiveObjectInspector.PrimitiveCategory.INTERVAL_YEAR_MONTH;
+ converters[0] = ObjectInspectorConverters.getConverter(
+ arguments[0], PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ obtainDateConverter(arguments, 0, inputTypes, converters);
+ break;
+ default:
+ // build error message
+ StringBuilder sb = new StringBuilder();
+ sb.append(getFuncName());
+ sb.append(" does not take ");
+ sb.append(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
+ sb.append(" type");
+ throw new UDFArgumentTypeException(0, sb.toString());
}
- try {
- Date date = formatter.parse(dateString.toString());
- calendar.setTime(date);
- result.set(calendar.get(Calendar.YEAR));
- return result;
- } catch (ParseException e) {
- return null;
- }
+ ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
+ return outputOI;
}
- public IntWritable evaluate(DateWritable d) {
- if (d == null) {
- return null;
+ @Override
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ switch (inputTypes[0]) {
+ case INTERVAL_YEAR_MONTH:
+ HiveIntervalYearMonth intervalYearMonth = getIntervalYearMonthValue(arguments, 0, inputTypes, converters);
+ if (intervalYearMonth == null) {
+ return null;
+ }
+ output.set(intervalYearMonth.getYears());
+ break;
+ case STRING:
+ case CHAR:
+ case VARCHAR:
+ case DATE:
+ case TIMESTAMP:
+ case TIMESTAMPLOCALTZ:
+ case VOID:
+ Date date = getDateValue(arguments, 0, inputTypes, converters);
+ if (date == null) {
+ return null;
+ }
+ output.set(date.getLocalDate().getYear());
}
-
- calendar.setTime(d.get(false)); // Time doesn't matter.
- result.set(calendar.get(Calendar.YEAR));
- return result;
+ return output;
}
- public IntWritable evaluate(TimestampWritable t) {
- if (t == null) {
- return null;
- }
-
- calendar.setTime(t.getTimestamp());
- result.set(calendar.get(Calendar.YEAR));
- return result;
+ @Override
+ protected String getFuncName() {
+ return "year";
}
- public IntWritable evaluate(HiveIntervalYearMonthWritable i) {
- if (i == null) {
- return null;
- }
-
- result.set(i.getHiveIntervalYearMonth().getYears());
- return result;
+ @Override
+ public String getDisplayString(String[] children) {
+ return getStandardDisplayString(getFuncName(), children);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
index 5c67242850..d69a4f74f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/BaseMaskUDF.java
@@ -21,6 +21,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -34,8 +35,6 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import java.sql.Date;
-
public abstract class BaseMaskUDF extends GenericUDF {
private static final Log LOG = LogFactory.getLog(BaseMaskUDF.class);
@@ -228,13 +227,13 @@ public Object getTransformedWritable(DeferredObject object) throws HiveException
class DateTransformerAdapter extends AbstractTransformerAdapter {
final DateObjectInspector columnType;
- final DateWritable writable;
+ final DateWritableV2 writable;
public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer) {
- this(columnType, transformer, new DateWritable());
+ this(columnType, transformer, new DateWritableV2());
}
- public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritable writable) {
+ public DateTransformerAdapter(DateObjectInspector columnType, AbstractTransformer transformer, DateWritableV2 writable) {
super(transformer);
this.columnType = columnType;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
index ca8bc8f42e..a8bcc972bb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFBloomFilter.java
@@ -20,19 +20,19 @@
import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.SelectOperator;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedUDAFs;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.gen.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ColStatistics;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.Statistics;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -46,7 +46,6 @@
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
-import java.sql.Timestamp;
import java.util.List;
/**
@@ -197,14 +196,14 @@ public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveExcep
bf.addBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
break;
case DATE:
- DateWritable vDate = ((DateObjectInspector)inputOI).
+ DateWritableV2 vDate = ((DateObjectInspector)inputOI).
getPrimitiveWritableObject(parameters[0]);
bf.addLong(vDate.getDays());
break;
case TIMESTAMP:
Timestamp vTimeStamp = ((TimestampObjectInspector)inputOI).
getPrimitiveJavaObject(parameters[0]);
- bf.addLong(vTimeStamp.getTime());
+ bf.addLong(vTimeStamp.toEpochMilli());
break;
case CHAR:
Text vChar = ((HiveCharObjectInspector)inputOI).
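
For reference, a minimal standalone sketch (not part of this patch, plain java.time only) of what a toEpochMilli()-style accessor computes for the bloom-filter hashing above: epoch milliseconds are the epoch second times 1000 plus the truncated nano-of-second.

// Standalone sketch: deriving epoch millis from a java.time-backed timestamp value.
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class EpochMilliSketch {
  public static void main(String[] args) {
    LocalDateTime ldt = LocalDateTime.of(2009, 7, 30, 12, 58, 59, 123_456_789);
    Instant instant = ldt.toInstant(ZoneOffset.UTC);
    long millis = instant.getEpochSecond() * 1000L + instant.getNano() / 1_000_000;
    System.out.println(millis);                  // 1248958739123
    System.out.println(instant.toEpochMilli());  // same value
  }
}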
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
index 226758981c..dd365dddcb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
@@ -29,7 +29,7 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -1297,7 +1297,7 @@ public void reset(AggregationBuffer agg) throws HiveException {
* High/low value will be saved in stats DB as long value representing days since epoch.
*/
public static class GenericUDAFDateStatsEvaluator
-      extends GenericUDAFNumericStatsEvaluator<DateWritable, DateObjectInspector> {
+      extends GenericUDAFNumericStatsEvaluator<DateWritableV2, DateObjectInspector> {
@Override
protected DateObjectInspector getValueObjectInspector() {
@@ -1319,8 +1319,8 @@ public int estimate() {
@Override
protected void update(Object p, PrimitiveObjectInspector inputOI) {
- // DateWritable is mutable, DateStatsAgg needs its own copy
- DateWritable v = new DateWritable((DateWritable) inputOI.getPrimitiveWritableObject(p));
+ // DateWritableV2 is mutable, DateStatsAgg needs its own copy
+ DateWritableV2 v = new DateWritableV2((DateWritableV2) inputOI.getPrimitiveWritableObject(p));
//Update min counter if new value is less than min seen so far
if (min == null || v.compareTo(min) < 0) {
@@ -1338,8 +1338,8 @@ protected void update(Object p, PrimitiveObjectInspector inputOI) {
protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
if ((minValue != null) && (min == null ||
min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) {
- // DateWritable is mutable, DateStatsAgg needs its own copy
- min = new DateWritable(minFieldOI.getPrimitiveWritableObject(minValue));
+ // DateWritableV2 is mutable, DateStatsAgg needs its own copy
+ min = new DateWritableV2(minFieldOI.getPrimitiveWritableObject(minValue));
}
}
@@ -1347,8 +1347,8 @@ protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
protected void updateMax(Object maxValue, DateObjectInspector maxFieldOI) {
if ((maxValue != null) && (max == null ||
max.compareTo(maxFieldOI.getPrimitiveWritableObject(maxValue)) < 0)) {
- // DateWritable is mutable, DateStatsAgg needs its own copy
- max = new DateWritable(maxFieldOI.getPrimitiveWritableObject(maxValue));
+ // DateWritableV2 is mutable, DateStatsAgg needs its own copy
+ max = new DateWritableV2(maxFieldOI.getPrimitiveWritableObject(maxValue));
}
}
};
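
For reference, a minimal standalone sketch (not part of this patch) of the defensive-copy rule the comments above insist on: writables are mutable and get reused row after row, so a min/max tracker must copy the value it keeps rather than hold the reused instance.

// Standalone sketch: copying a mutable writable before retaining it as a running minimum.
import org.apache.hadoop.io.IntWritable;

public class MinMaxCopySketch {
  static IntWritable min;

  static void update(IntWritable reused) {
    if (min == null || reused.compareTo(min) < 0) {
      min = new IntWritable(reused.get());   // copy, do not keep the reused instance
    }
  }

  public static void main(String[] args) {
    IntWritable buffer = new IntWritable();
    int[] stream = { 7, 3, 9 };
    for (int v : stream) {
      buffer.set(v);        // the same object is reused for every row
      update(buffer);
    }
    System.out.println(min.get());   // 3; without the copy it would report 9
  }
}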
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
index 710f0e88e1..536e56410c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
@@ -20,12 +20,13 @@
import java.io.Closeable;
import java.io.IOException;
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.util.Date;
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -34,8 +35,10 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
+import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -45,14 +48,12 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
-import org.apache.hive.common.util.DateUtils;
/**
* A Generic User-defined function (GenericUDF) for the use with Hive.
@@ -489,7 +490,7 @@ protected Double getDoubleValue(DeferredObject[] arguments, int i, Converter[] c
}
protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
- Converter[] converters) throws HiveException {
+ Converter[] converters) throws HiveException {
Object obj;
if ((obj = arguments[i].get()) == null) {
return null;
@@ -502,16 +503,16 @@ protected Date getDateValue(DeferredObject[] arguments, int i, PrimitiveCategory
case CHAR:
String dateStr = converters[i].convert(obj).toString();
try {
- date = DateUtils.getDateFormat().parse(dateStr);
- } catch (ParseException e) {
- throw new UDFArgumentException("Unparsable date: " + dateStr);
+ date = Date.valueOf(dateStr);
+ } catch (IllegalArgumentException e) {
+ date = null;
}
break;
case TIMESTAMP:
case DATE:
case TIMESTAMPLOCALTZ:
Object writableValue = converters[i].convert(obj);
- date = ((DateWritable) writableValue).get();
+ date = ((DateWritableV2) writableValue).get();
break;
default:
throw new UDFArgumentTypeException(0, getFuncName()
@@ -535,6 +536,58 @@ protected Timestamp getTimestampValue(DeferredObject[] arguments, int i, Convert
return ts;
}
+ protected HiveIntervalYearMonth getIntervalYearMonthValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
+ Converter[] converters) throws HiveException {
+ Object obj;
+ if ((obj = arguments[i].get()) == null) {
+ return null;
+ }
+
+ HiveIntervalYearMonth intervalYearMonth;
+ switch (inputTypes[i]) {
+ case STRING:
+ case VARCHAR:
+ case CHAR:
+ String intervalYearMonthStr = converters[i].convert(obj).toString();
+ intervalYearMonth = HiveIntervalYearMonth.valueOf(intervalYearMonthStr);
+ break;
+ case INTERVAL_YEAR_MONTH:
+ Object writableValue = converters[i].convert(obj);
+ intervalYearMonth = ((HiveIntervalYearMonthWritable) writableValue).getHiveIntervalYearMonth();
+ break;
+ default:
+ throw new UDFArgumentTypeException(0, getFuncName()
+ + " only takes INTERVAL_YEAR_MONTH and STRING_GROUP types, got " + inputTypes[i]);
+ }
+ return intervalYearMonth;
+ }
+
+ protected HiveIntervalDayTime getIntervalDayTimeValue(DeferredObject[] arguments, int i, PrimitiveCategory[] inputTypes,
+ Converter[] converters) throws HiveException {
+ Object obj;
+ if ((obj = arguments[i].get()) == null) {
+ return null;
+ }
+
+ HiveIntervalDayTime intervalDayTime;
+ switch (inputTypes[i]) {
+ case STRING:
+ case VARCHAR:
+ case CHAR:
+ String intervalDayTimeStr = converters[i].convert(obj).toString();
+ intervalDayTime = HiveIntervalDayTime.valueOf(intervalDayTimeStr);
+ break;
+ case INTERVAL_DAY_TIME:
+ Object writableValue = converters[i].convert(obj);
+ intervalDayTime = ((HiveIntervalDayTimeWritable) writableValue).getHiveIntervalDayTime();
+ break;
+ default:
+ throw new UDFArgumentTypeException(0, getFuncName()
+ + " only takes INTERVAL_DAY_TIME and STRING_GROUP types, got " + inputTypes[i]);
+ }
+ return intervalDayTime;
+ }
+
protected String getConstantStringValue(ObjectInspector[] arguments, int i) {
Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
String str = constValue == null ? null : constValue.toString();
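
For reference, a minimal standalone sketch (not part of this patch, plain java.time only) of the behaviour change in getDateValue() above: an unparsable date string now yields null (SQL NULL) instead of raising an argument error. The parse-or-null pattern looks like this.

// Standalone sketch: parse a date string, returning null for malformed input.
import java.time.LocalDate;
import java.time.format.DateTimeParseException;

public class ParseOrNullSketch {
  static LocalDate parseOrNull(String s) {
    try {
      return LocalDate.parse(s);       // ISO yyyy-MM-dd
    } catch (DateTimeParseException e) {
      return null;                     // treat malformed input as SQL NULL
    }
  }

  public static void main(String[] args) {
    System.out.println(parseOrNull("2009-07-30"));   // 2009-07-30
    System.out.println(parseOrNull("not-a-date"));   // null
  }
}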
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
index dae4b97b4a..07a0314ef5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
@@ -23,8 +23,9 @@
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
import java.util.Calendar;
-import java.util.Date;
+import java.util.TimeZone;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -34,7 +35,6 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
/**
* GenericUDFAddMonths.
@@ -53,7 +53,7 @@
public class GenericUDFAddMonths extends GenericUDF {
private transient Converter[] converters = new Converter[2];
private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
- private final Calendar calendar = Calendar.getInstance();
+ private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
private final Text output = new Text();
private transient Integer numMonthsConst;
private transient boolean isNumMonthsConst;
@@ -100,8 +100,8 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
}
addMonth(date, numMonthInt);
- Date newDate = calendar.getTime();
- output.set(DateUtils.getDateFormat().format(newDate));
+ Date newDate = Date.ofEpochMilli(calendar.getTimeInMillis());
+ output.set(newDate.toString());
return output;
}
@@ -116,7 +116,7 @@ protected String getFuncName() {
}
protected Calendar addMonth(Date d, int numMonths) {
- calendar.setTime(d);
+ calendar.setTimeInMillis(d.toEpochMilli());
boolean lastDatOfMonth = isLastDayOfMonth(calendar);
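
For reference, a minimal standalone sketch (not part of this patch, plain java.time only) of why add_months still needs the isLastDayOfMonth() check: plusMonths() clamps day-of-month overflow but does not make "last day of month" sticky.

// Standalone sketch: month arithmetic clamping vs. add_months' last-day rule.
import java.time.LocalDate;
import java.time.temporal.TemporalAdjusters;

public class AddMonthsSketch {
  public static void main(String[] args) {
    System.out.println(LocalDate.of(2019, 1, 31).plusMonths(1));  // 2019-02-28 (clamped)
    System.out.println(LocalDate.of(2019, 2, 28).plusMonths(1));  // 2019-03-28 (not promoted)
    // add_months semantics: a last day of month stays the last day of the result month.
    System.out.println(LocalDate.of(2019, 2, 28).plusMonths(1)
        .with(TemporalAdjusters.lastDayOfMonth()));               // 2019-03-31
  }
}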
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
index 7d3c3f46aa..cffd10beee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentDate.java
@@ -17,15 +17,14 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.UDFType;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -38,7 +37,7 @@
@NDV(maxNdv = 1)
public class GenericUDFCurrentDate extends GenericUDF {
- protected DateWritable currentDate;
+ protected DateWritableV2 currentDate;
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
@@ -52,7 +51,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments)
if (currentDate == null) {
Date dateVal =
Date.valueOf(SessionState.get().getQueryCurrentTimestamp().toString().substring(0, 10));
- currentDate = new DateWritable(dateVal);
+ currentDate = new DateWritableV2(dateVal);
}
return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
@@ -63,11 +62,11 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
return currentDate;
}
- public DateWritable getCurrentDate() {
+ public DateWritableV2 getCurrentDate() {
return currentDate;
}
- public void setCurrentDate(DateWritable currentDate) {
+ public void setCurrentDate(DateWritableV2 currentDate) {
this.currentDate = currentDate;
}
@@ -83,7 +82,7 @@ public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
// Need to preserve currentDate
GenericUDFCurrentDate other = (GenericUDFCurrentDate) newInstance;
if (this.currentDate != null) {
- other.currentDate = new DateWritable(this.currentDate);
+ other.currentDate = new DateWritableV2(this.currentDate);
}
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
index 9da51c84f5..741aaffb6a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentTimestamp.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -48,7 +49,9 @@ public ObjectInspector initialize(ObjectInspector[] arguments)
}
if (currentTimestamp == null) {
- currentTimestamp = new TimestampWritable(SessionState.get().getQueryCurrentTimestamp());
+ java.sql.Timestamp ts = SessionState.get().getQueryCurrentTimestamp();
+ currentTimestamp = new TimestampWritable(
+ Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
}
return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
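
For reference, a minimal standalone sketch (not part of this patch) of why the conversion above passes both getTime() and getNanos(): java.sql.Timestamp reports the millisecond component twice, inside getTime() and inside getNanos(), so a (millis, nanos) factory has to replace the sub-second part rather than add it on top.

// Standalone sketch: rebuilding an instant from java.sql.Timestamp's millis + nanos.
import java.sql.Timestamp;
import java.time.Instant;

public class SqlTimestampBridgeSketch {
  public static void main(String[] args) {
    Timestamp ts = Timestamp.valueOf("2009-07-30 12:58:59.123456789");
    System.out.println(ts.getTime());    // ...123  (millis already include the .123)
    System.out.println(ts.getNanos());   // 123456789 (nanos include the .123 as well)

    // Whole seconds from getTime(), full sub-second precision from getNanos().
    Instant rebuilt = Instant.ofEpochSecond(Math.floorDiv(ts.getTime(), 1000L), ts.getNanos());
    System.out.println(rebuilt.equals(ts.toInstant()));   // true
  }
}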
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index b73893d0bc..fdbd99eb24 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -17,9 +17,8 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -28,7 +27,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateString;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateTimestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -57,8 +56,8 @@
private transient PrimitiveCategory inputType;
private transient PrimitiveObjectInspector argumentOI;
private transient DateParser dateParser = new DateParser();
- private transient final DateWritable output = new DateWritable();
- private transient final Date date = new Date(0);
+ private transient final DateWritableV2 output = new DateWritableV2();
+ private transient final Date date = new Date();
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
@@ -119,11 +118,11 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
case TIMESTAMP:
Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
.getTimestamp();
- output.set(DateWritable.millisToDays(ts.getTime()));
+ output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
break;
case TIMESTAMPLOCALTZ:
case DATE:
- DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
+ DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
output.set(dw);
break;
default:
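
For reference, a minimal standalone sketch (not part of this patch) of the millisToDays() conversion used above, assuming the V2 writable derives the day count from UTC millis: a DATE is stored as days since the epoch, i.e. the UTC milliseconds floor-divided by 86,400,000, which matches LocalDate.toEpochDay().

// Standalone sketch: timestamp millis to days-since-epoch, UTC interpretation.
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class MillisToDaysSketch {
  public static void main(String[] args) {
    LocalDateTime ts = LocalDateTime.of(2009, 7, 30, 12, 58, 59);
    long utcMillis = ts.toInstant(ZoneOffset.UTC).toEpochMilli();
    long days = Math.floorDiv(utcMillis, 86_400_000L);
    System.out.println(days);                                    // 14455
    System.out.println(LocalDate.of(2009, 7, 30).toEpochDay());  // 14455
  }
}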
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
index 8ba103beb6..f7b7fdde86 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
@@ -17,9 +17,8 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -30,7 +29,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddScalarCol;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -65,12 +64,12 @@
@VectorizedExpressions({VectorUDFDateAddColScalar.class, VectorUDFDateAddScalarCol.class, VectorUDFDateAddColCol.class})
public class GenericUDFDateAdd extends GenericUDF {
private transient final DateParser dateParser = new DateParser();
- private transient final Date dateVal = new Date(0);
+ private transient final Date dateVal = new Date();
private transient Converter dateConverter;
private transient Converter daysConverter;
private transient PrimitiveCategory inputType1;
private transient PrimitiveCategory inputType2;
- private final DateWritable output = new DateWritable();
+ private final DateWritableV2 output = new DateWritableV2();
protected int signModifier = 1; // 1 for addition, -1 for subtraction
@Override
@@ -163,7 +162,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
return null;
}
- // Convert the first param into a DateWritable value
+ // Convert the first param into a DateWritableV2 value
switch (inputType1) {
case STRING:
String dateString = dateConverter.convert(arguments[0].get()).toString();
@@ -176,10 +175,10 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
case TIMESTAMP:
Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get()))
.getTimestamp();
- output.set(DateWritable.millisToDays(ts.getTime()));
+ output.set(DateWritableV2.millisToDays(ts.toEpochMilli()));
break;
case DATE:
- DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
+ DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get());
output.set(dw.getDays());
break;
default:
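The add/subtract path above ultimately reduces to arithmetic on day numbers, with signModifier flipping between date_add and date_sub. A hedged sketch of that arithmetic with java.time (the names here are illustrative only):

```java
import java.time.LocalDate;

public class DateAddSketch {
    // date_add / date_sub as arithmetic on epoch-day numbers; signModifier is 1 or -1.
    static LocalDate addDays(LocalDate base, int days, int signModifier) {
        return LocalDate.ofEpochDay(base.toEpochDay() + (long) signModifier * days);
    }

    public static void main(String[] args) {
        LocalDate d = LocalDate.parse("2015-01-14");
        System.out.println(addDays(d, 7, 1));  // 2015-01-21
        System.out.println(addDays(d, 7, -1)); // 2015-01-07
    }
}
```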
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
index e9cbcf7459..eab9e6b70f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateDiff.java
@@ -17,11 +17,8 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.sql.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -32,7 +29,7 @@
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffColScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateDiffScalarCol;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -43,6 +40,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.io.IntWritable;
import javax.annotation.Nullable;
@@ -65,7 +63,6 @@
+ " 1")
@VectorizedExpressions({VectorUDFDateDiffColScalar.class, VectorUDFDateDiffColCol.class, VectorUDFDateDiffScalarCol.class})
public class GenericUDFDateDiff extends GenericUDF {
- private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
private transient Converter inputConverter1;
private transient Converter inputConverter2;
private IntWritable output = new IntWritable();
@@ -116,21 +113,25 @@ private Date convertToDate(PrimitiveCategory inputType, Converter converter, Def
case CHAR:
String dateString = converter.convert(argument.get()).toString();
try {
- return new Date(formatter.parse(dateString).getTime());
- } catch (ParseException e) {
+ return Date.valueOf(dateString);
+ } catch (IllegalArgumentException e) {
+ Timestamp ts = PrimitiveObjectInspectorUtils.getTimestampFromString(dateString);
+ if (ts != null) {
+ return Date.ofEpochMilli(ts.toEpochMilli());
+ }
return null;
}
case TIMESTAMP:
Timestamp ts = ((TimestampWritable) converter.convert(argument.get()))
.getTimestamp();
- return new Date(ts.getTime());
+ return Date.ofEpochMilli(ts.toEpochMilli());
case DATE:
- DateWritable dw = (DateWritable) converter.convert(argument.get());
+ DateWritableV2 dw = (DateWritableV2) converter.convert(argument.get());
return dw.get();
case TIMESTAMPLOCALTZ:
TimestampTZ tsz = ((TimestampLocalTZWritable) converter.convert(argument.get()))
.getTimestampTZ();
- return new Date(tsz.getEpochSecond() * 1000l);
+ return Date.ofEpochMilli(tsz.getEpochSecond() * 1000l);
default:
throw new UDFArgumentException(
"TO_DATE() only takes STRING/TIMESTAMP/TIMESTAMPLOCALTZ types, got " + inputType);
@@ -175,7 +176,7 @@ private IntWritable evaluate(Date date, Date date2) {
return null;
}
- result.set(DateWritable.dateToDays(date) - DateWritable.dateToDays(date2));
+ result.set(DateWritableV2.dateToDays(date) - DateWritableV2.dateToDays(date2));
return result;
}
}
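convertToDate now tries a strict date parse first and falls back to a timestamp-style literal, and the final result is a difference of day numbers. A standalone approximation with java.time (the fallback below simply trims to the date part, which is an assumption, not the Hive parser):

```java
import java.time.LocalDate;
import java.time.format.DateTimeParseException;

public class DateDiffSketch {
    // datediff as a difference of epoch-day numbers.
    static Integer dateDiff(String left, String right) {
        LocalDate d1 = toDate(left);
        LocalDate d2 = toDate(right);
        if (d1 == null || d2 == null) {
            return null;
        }
        return (int) (d1.toEpochDay() - d2.toEpochDay());
    }

    // Strict "yyyy-MM-dd" parse, falling back to the date part of a timestamp literal.
    static LocalDate toDate(String s) {
        try {
            return LocalDate.parse(s);
        } catch (DateTimeParseException e) {
            try {
                return LocalDate.parse(s.substring(0, 10));
            } catch (Exception inner) {
                return null;
            }
        }
    }

    public static void main(String[] args) {
        System.out.println(dateDiff("2009-07-30", "2009-07-28"));          // 2
        System.out.println(dateDiff("2009-07-30 12:58:59", "2009-07-28")); // 2
    }
}
```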
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
index 6b775d6595..6d3e86f921 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
@@ -21,8 +21,10 @@
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
import java.text.SimpleDateFormat;
-import java.util.Date;
+import java.util.TimeZone;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -44,14 +46,15 @@
@Description(name = "date_format", value = "_FUNC_(date/timestamp/string, fmt) - converts a date/timestamp/string "
+ "to a value of string in the format specified by the date format fmt.",
extended = "Supported formats are SimpleDateFormat formats - "
- + "https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html. "
- + "Second argument fmt should be constant.\n"
- + "Example: > SELECT _FUNC_('2015-04-08', 'y');\n '2015'")
+ + "https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html. "
+ + "Second argument fmt should be constant.\n"
+ + "Example: > SELECT _FUNC_('2015-04-08', 'y');\n '2015'")
public class GenericUDFDateFormat extends GenericUDF {
private transient Converter[] tsConverters = new Converter[2];
private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
private transient Converter[] dtConverters = new Converter[2];
private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
+ private final java.util.Date date = new java.util.Date();
private final Text output = new Text();
private transient SimpleDateFormat formatter;
@@ -77,6 +80,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
if (fmtStr != null) {
try {
formatter = new SimpleDateFormat(fmtStr);
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
} catch (IllegalArgumentException e) {
// ignore
}
@@ -97,14 +101,16 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
}
// the function should support both short date and full timestamp format
// time part of the timestamp should not be skipped
- Date date = getTimestampValue(arguments, 0, tsConverters);
- if (date == null) {
- date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
- if (date == null) {
+ Timestamp ts = getTimestampValue(arguments, 0, tsConverters);
+ if (ts == null) {
+ Date d = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+ if (d == null) {
return null;
}
+ ts = Timestamp.ofEpochMilli(d.toEpochMilli());
}
+ date.setTime(ts.toEpochMilli());
String res = formatter.format(date);
if (res == null) {
return null;
@@ -122,4 +128,4 @@ public String getDisplayString(String[] children) {
protected String getFuncName() {
return "date_format";
}
-}
+}
\ No newline at end of file
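date_format keeps SimpleDateFormat but now pins it to UTC and feeds it a reusable java.util.Date holding the timestamp's epoch millis. A small self-contained illustration (the sample millis value is an assumption):

```java
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class DateFormatSketch {
    public static void main(String[] args) {
        // Pinning the formatter to UTC keeps the rendered fields independent of the
        // JVM's default time zone.
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        formatter.setTimeZone(TimeZone.getTimeZone("UTC"));

        long epochMillis = 1428451200000L; // 2015-04-08T00:00:00Z
        Date reusable = new Date();
        reusable.setTime(epochMillis);

        System.out.println(formatter.format(reusable)); // 2015-04-08 00:00:00
    }
}
```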
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
index 2fef893b17..bcc4114099 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
@@ -17,34 +17,13 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Timestamp;
-import java.text.ParseException;
import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColCol;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubScalarCol;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.IntWritable;
/**
* UDFDateSub.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
index 8691ed15e3..69229bbe7d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFromUtcTimestamp.java
@@ -17,13 +17,11 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
import java.util.TimeZone;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -34,6 +32,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
@Description(name = "from_utc_timestamp",
value = "from_utc_timestamp(timestamp, string timezone) - "
@@ -45,7 +45,6 @@
private transient PrimitiveObjectInspector[] argumentOIs;
private transient TimestampConverter timestampConverter;
private transient TextConverter textConverter;
- private transient SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private transient TimeZone tzUTC = TimeZone.getTimeZone("UTC");
@Override
@@ -70,26 +69,6 @@ public ObjectInspector initialize(ObjectInspector[] arguments)
return PrimitiveObjectInspectorFactory.javaTimestampObjectInspector;
}
- /**
- * Parse the timestamp string using the input TimeZone.
- * This does not parse fractional seconds.
- * @param tsString
- * @param tz
- * @return
- */
- protected Timestamp timestampFromString(String tsString, TimeZone tz) {
- dateFormat.setTimeZone(tz);
- try {
- java.util.Date date = dateFormat.parse(tsString);
- if (date == null) {
- return null;
- }
- return new Timestamp(date.getTime());
- } catch (ParseException err) {
- return null;
- }
- }
-
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
Object o0 = arguments[0].get();
@@ -123,21 +102,15 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
// inputTs is the year/month/day/hour/minute/second in the local timezone.
// For this UDF we want it in the timezone represented by fromTz
- Timestamp fromTs = timestampFromString(inputTs.toString(), fromTz);
+ TimestampTZ fromTs = TimestampTZUtil.parse(inputTs.toString(), fromTz.toZoneId());
if (fromTs == null) {
return null;
}
// Now output this timestamp's millis value to the equivalent toTz.
- dateFormat.setTimeZone(toTz);
- Timestamp result = Timestamp.valueOf(dateFormat.format(fromTs));
-
- if (inputTs.getNanos() != 0) {
- result.setNanos(inputTs.getNanos());
- }
-
+ Timestamp result = new Timestamp(
+ fromTs.getZonedDateTime().withZoneSameInstant(toTz.toZoneId()).toLocalDateTime());
return result;
-
}
@Override
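The rewritten evaluate() replaces the SimpleDateFormat round trip with zoned-date-time math: interpret the wall-clock value in the source zone, then render the same instant in the target zone. The equivalent with plain java.time:

```java
import java.time.LocalDateTime;
import java.time.ZoneId;

public class FromUtcTimestampSketch {
    // Reinterpret a wall-clock timestamp in fromTz, then view the same instant in toTz.
    static LocalDateTime shift(LocalDateTime ts, ZoneId fromTz, ZoneId toTz) {
        return ts.atZone(fromTz).withZoneSameInstant(toTz).toLocalDateTime();
    }

    public static void main(String[] args) {
        LocalDateTime utc = LocalDateTime.parse("2015-06-01T12:30:45");
        // Seoul is UTC+9, so the wall clock moves forward nine hours.
        System.out.println(shift(utc, ZoneId.of("UTC"), ZoneId.of("Asia/Seoul"))); // 2015-06-01T21:30:45
    }
}
```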
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
index d739af94f2..733fe63e80 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInBloomFilter.java
@@ -20,13 +20,14 @@
import org.apache.hadoop.hive.common.io.NonSyncByteArrayInputStream;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorInBloomFilterColDynamicValue;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -37,10 +38,8 @@
import org.apache.hadoop.io.Text;
import org.apache.hive.common.util.BloomKFilter;
-import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.sql.Timestamp;
/**
* GenericUDF to lookup a value in BloomFilter
@@ -147,13 +146,13 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
int startIdx = vDecimal.toBytes(scratchBuffer);
return bloomFilter.testBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
case DATE:
- DateWritable vDate = ((DateObjectInspector) valObjectInspector).
+ DateWritableV2 vDate = ((DateObjectInspector) valObjectInspector).
getPrimitiveWritableObject(arguments[0].get());
return bloomFilter.testLong(vDate.getDays());
case TIMESTAMP:
Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector).
getPrimitiveJavaObject(arguments[0].get());
- return bloomFilter.testLong(vTimeStamp.getTime());
+ return bloomFilter.testLong(vTimeStamp.toEpochMilli());
case CHAR:
Text vChar = ((HiveCharObjectInspector) valObjectInspector).
getPrimitiveWritableObject(arguments[0].get()).getStrippedValue();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
index 238eff91c6..52126dacb1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFLastDay.java
@@ -20,9 +20,8 @@
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
-import java.util.Calendar;
-import java.util.Date;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -31,7 +30,6 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
/**
* GenericUDFLastDay.
@@ -48,7 +46,7 @@
public class GenericUDFLastDay extends GenericUDF {
private transient Converter[] converters = new Converter[1];
private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
- private final Calendar calendar = Calendar.getInstance();
+ private final Date date = new Date();
private final Text output = new Text();
@Override
@@ -67,14 +65,13 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
- Date date = getDateValue(arguments, 0, inputTypes, converters);
- if (date == null) {
+ Date d = getDateValue(arguments, 0, inputTypes, converters);
+ if (d == null) {
return null;
}
- lastDay(date);
- Date newDate = calendar.getTime();
- output.set(DateUtils.getDateFormat().format(newDate));
+ lastDay(d);
+ output.set(date.toString());
return output;
}
@@ -88,10 +85,9 @@ protected String getFuncName() {
return "last_day";
}
- protected Calendar lastDay(Date d) {
- calendar.setTime(d);
- int maxDd = calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
- calendar.set(Calendar.DAY_OF_MONTH, maxDd);
- return calendar;
+ protected Date lastDay(Date d) {
+ date.setTimeInDays(d.toEpochDay());
+ date.setDayOfMonth(date.getLocalDate().lengthOfMonth());
+ return date;
}
}
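lastDay() now leans on LocalDate's leap-year-aware month length instead of Calendar's actual-maximum lookup. A standalone sketch of the same computation:

```java
import java.time.LocalDate;
import java.time.temporal.TemporalAdjusters;

public class LastDaySketch {
    public static void main(String[] args) {
        LocalDate d = LocalDate.parse("2016-02-10");
        // lengthOfMonth() already accounts for leap years.
        LocalDate viaLength = d.withDayOfMonth(d.lengthOfMonth());
        LocalDate viaAdjuster = d.with(TemporalAdjusters.lastDayOfMonth());
        System.out.println(viaLength);   // 2016-02-29
        System.out.println(viaAdjuster); // 2016-02-29
    }
}
```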
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
index bf2ec823b0..4c94164411 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMask.java
@@ -19,8 +19,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -218,11 +217,12 @@ Long transform(final Long value) {
@Override
Date transform(final Date value) {
- int year = maskedYearValue == UNMASKED_VAL ? value.getYear() : maskedYearValue;
- int month = maskedMonthValue == UNMASKED_VAL ? value.getMonth() : maskedMonthValue;
- int day = maskedDayValue == UNMASKED_VAL ? value.getDate() : maskedDayValue;
+ int actualMonthValue = maskedMonthValue + 1;
+ int year = maskedYearValue == UNMASKED_VAL ? value.getLocalDate().getYear() : maskedYearValue;
+ int month = maskedMonthValue == UNMASKED_VAL ? value.getLocalDate().getMonthValue() : actualMonthValue;
+ int day = maskedDayValue == UNMASKED_VAL ? value.getLocalDate().getDayOfMonth() : maskedDayValue;
- return new Date(year, month, day);
+ return Date.of(year, month, day);
}
protected int transformChar(final int c) {
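The `+ 1` on the masked month compensates for the constructor change: java.sql.Date took a 0-based month, while the new factory takes a 1-based one. A sketch of the masking rule with LocalDate (the mask values below are a sample policy, not Hive's defaults):

```java
import java.time.LocalDate;

public class MaskDateSketch {
    static final int UNMASKED = -1;

    // Replace year/month/day with masked values; the month parameter is kept 0-based
    // to mirror the UDF's configuration, so +1 converts it for the 1-based factory.
    static LocalDate mask(LocalDate value, int maskedYear, int maskedMonth0Based, int maskedDay) {
        int year = maskedYear == UNMASKED ? value.getYear() : maskedYear;
        int month = maskedMonth0Based == UNMASKED ? value.getMonthValue() : maskedMonth0Based + 1;
        int day = maskedDay == UNMASKED ? value.getDayOfMonth() : maskedDay;
        return LocalDate.of(year, month, day);
    }

    public static void main(String[] args) {
        // Mask only the year to 1900; keep month and day.
        System.out.println(mask(LocalDate.parse("2018-03-15"), 1900, UNMASKED, UNMASKED)); // 1900-03-15
    }
}
```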
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
index 8b1e988b42..a068541d36 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMaskHash.java
@@ -18,9 +18,8 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-
import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
index d04e13533e..dda30c0c2d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
@@ -18,19 +18,13 @@
package org.apache.hadoop.hive.ql.udf.generic;
import static java.math.BigDecimal.ROUND_HALF_UP;
-import static java.util.Calendar.DATE;
-import static java.util.Calendar.HOUR_OF_DAY;
-import static java.util.Calendar.MINUTE;
-import static java.util.Calendar.MONTH;
-import static java.util.Calendar.SECOND;
-import static java.util.Calendar.YEAR;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.DATE_GROUP;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
import java.math.BigDecimal;
-import java.util.Calendar;
-import java.util.Date;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -59,12 +53,11 @@
+ " Example:\n"
+ " > SELECT _FUNC_('1997-02-28 10:30:00', '1996-10-30');\n 3.94959677")
public class GenericUDFMonthsBetween extends GenericUDF {
+
private transient Converter[] tsConverters = new Converter[2];
private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
private transient Converter[] dtConverters = new Converter[2];
private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
- private final Calendar cal1 = Calendar.getInstance();
- private final Calendar cal2 = Calendar.getInstance();
private final DoubleWritable output = new DoubleWritable();
private boolean isRoundOffNeeded = true;
@@ -103,37 +96,39 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
public Object evaluate(DeferredObject[] arguments) throws HiveException {
// the function should support both short date and full timestamp format
// time part of the timestamp should not be skipped
- Date date1 = getTimestampValue(arguments, 0, tsConverters);
+ Timestamp date1 = getTimestampValue(arguments, 0, tsConverters);
if (date1 == null) {
- date1 = getDateValue(arguments, 0, dtInputTypes, dtConverters);
- if (date1 == null) {
+ Date date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
+ if (date == null) {
return null;
}
+ date1 = Timestamp.ofEpochMilli(date.toEpochMilli());
}
- Date date2 = getTimestampValue(arguments, 1, tsConverters);
+ Timestamp date2 = getTimestampValue(arguments, 1, tsConverters);
if (date2 == null) {
- date2 = getDateValue(arguments, 1, dtInputTypes, dtConverters);
- if (date2 == null) {
+ Date date = getDateValue(arguments, 1, dtInputTypes, dtConverters);
+ if (date == null) {
return null;
}
+ date2 = Timestamp.ofEpochMilli(date.toEpochMilli());
}
- cal1.setTime(date1);
- cal2.setTime(date2);
-
// skip day/time part if both dates are end of the month
// or the same day of the month
- int monDiffInt = (cal1.get(YEAR) - cal2.get(YEAR)) * 12 + (cal1.get(MONTH) - cal2.get(MONTH));
- if (cal1.get(DATE) == cal2.get(DATE)
- || (cal1.get(DATE) == cal1.getActualMaximum(DATE) && cal2.get(DATE) == cal2
- .getActualMaximum(DATE))) {
+ int monDiffInt = (date1.getYear() - date2.getYear()) * 12
+ + (date1.getMonth() - date2.getMonth());
+ if (date1.getDay() == date2.getDay()
+ || (date1.getDay() == date1.getLocalDateTime().toLocalDate().lengthOfMonth()
+ && date2.getDay() == date2.getLocalDateTime().toLocalDate().lengthOfMonth())) {
output.set(monDiffInt);
return output;
}
- int sec1 = getDayPartInSec(cal1);
- int sec2 = getDayPartInSec(cal2);
+ Date start1 = Date.of(date1.getYear(), date1.getMonth(), 1);
+ Date start2 = Date.of(date2.getYear(), date2.getMonth(), 1);
+ long sec1 = date1.toEpochSecond() - start1.toEpochSecond();
+ long sec2 = date2.toEpochSecond() - start2.toEpochSecond();
// 1 sec is 0.000000373 months (1/2678400). 1 month is 31 days.
// there should be no adjustments for leap seconds
@@ -146,15 +141,6 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
return output;
}
- protected int getDayPartInSec(Calendar cal) {
- int dd = cal.get(DATE);
- int HH = cal.get(HOUR_OF_DAY);
- int mm = cal.get(MINUTE);
- int ss = cal.get(SECOND);
- int dayInSec = dd * 86400 + HH * 3600 + mm * 60 + ss;
- return dayInSec;
- }
-
@Override
public String getDisplayString(String[] children) {
return getStandardDisplayString(getFuncName(), children);
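The Calendar bookkeeping in months_between collapses to: whole-month difference, plus the day/time remainder as a fraction of a 31-day month (2678400 seconds), rounded to 8 decimals. A self-contained check of that formula against the documented example:

```java
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDateTime;

public class MonthsBetweenSketch {
    static double monthsBetween(LocalDateTime d1, LocalDateTime d2) {
        int monDiff = (d1.getYear() - d2.getYear()) * 12 + (d1.getMonthValue() - d2.getMonthValue());
        // Skip the day/time part when both fall on the same day of month or both on month end.
        boolean bothLastDay = d1.getDayOfMonth() == d1.toLocalDate().lengthOfMonth()
                && d2.getDayOfMonth() == d2.toLocalDate().lengthOfMonth();
        if (d1.getDayOfMonth() == d2.getDayOfMonth() || bothLastDay) {
            return monDiff;
        }
        long sec1 = secondsIntoMonth(d1);
        long sec2 = secondsIntoMonth(d2);
        double raw = monDiff + (sec1 - sec2) / 2678400.0; // 31 days in seconds
        return BigDecimal.valueOf(raw).setScale(8, RoundingMode.HALF_UP).doubleValue();
    }

    // Seconds elapsed since the first of the month at midnight.
    static long secondsIntoMonth(LocalDateTime d) {
        return (d.getDayOfMonth() - 1) * 86400L + d.toLocalTime().toSecondOfDay();
    }

    public static void main(String[] args) {
        System.out.println(monthsBetween(
                LocalDateTime.parse("1997-02-28T10:30:00"),
                LocalDateTime.parse("1996-10-30T00:00:00"))); // 3.94959677
    }
}
```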
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
index e74bae3d7d..f6cd86a8fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFNextDay.java
@@ -28,9 +28,7 @@
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -40,7 +38,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
-import org.apache.hive.common.util.DateUtils;
+
+import java.util.Calendar;
/**
* GenericUDFNextDay.
@@ -55,9 +54,10 @@
+ " 'yyyy-MM-dd'. day_of_week is day of the week (e.g. Mo, tue, FRIDAY)."
+ "Example:\n " + " > SELECT _FUNC_('2015-01-14', 'TU') FROM src LIMIT 1;\n" + " '2015-01-20'")
public class GenericUDFNextDay extends GenericUDF {
+
private transient Converter[] converters = new Converter[2];
private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[2];
- private final Calendar calendar = Calendar.getInstance();
+ private final Date date = new Date();
private final Text output = new Text();
private transient int dayOfWeekIntConst;
private transient boolean isDayOfWeekConst;
@@ -98,14 +98,13 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
return null;
}
- Date date = getDateValue(arguments, 0, inputTypes, converters);
- if (date == null) {
+ Date d = getDateValue(arguments, 0, inputTypes, converters);
+ if (d == null) {
return null;
}
- nextDay(date, dayOfWeekInt);
- Date newDate = calendar.getTime();
- output.set(DateUtils.getDateFormat().format(newDate));
+ nextDay(d, dayOfWeekInt);
+ output.set(date.toString());
return output;
}
@@ -119,10 +118,34 @@ protected String getFuncName() {
return "next_day";
}
- protected Calendar nextDay(Date date, int dayOfWeek) {
- calendar.setTime(date);
-
- int currDayOfWeek = calendar.get(Calendar.DAY_OF_WEEK);
+ protected Date nextDay(Date d, int dayOfWeek) {
+ date.setTimeInDays(d.toEpochDay());
+
+ int currDayOfWeek;
+ switch (date.getLocalDate().getDayOfWeek()) {
+ case MONDAY:
+ currDayOfWeek = 2;
+ break;
+ case TUESDAY:
+ currDayOfWeek = 3;
+ break;
+ case WEDNESDAY:
+ currDayOfWeek = 4;
+ break;
+ case THURSDAY:
+ currDayOfWeek = 5;
+ break;
+ case FRIDAY:
+ currDayOfWeek = 6;
+ break;
+ case SATURDAY:
+ currDayOfWeek = 7;
+ break;
+ default:
+ // SUNDAY
+ currDayOfWeek = 1;
+ break;
+ }
int daysToAdd;
if (currDayOfWeek < dayOfWeek) {
@@ -131,9 +154,9 @@ protected Calendar nextDay(Date date, int dayOfWeek) {
daysToAdd = 7 - currDayOfWeek + dayOfWeek;
}
- calendar.add(Calendar.DATE, daysToAdd);
+ date.setTimeInDays(date.toEpochDay() + daysToAdd);
- return calendar;
+ return date;
}
protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException {
@@ -164,6 +187,7 @@ protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException {
return -1;
}
+
public static enum DayOfWeek {
MON("MO", "MON", "MONDAY"), TUE("TU", "TUE", "TUESDAY"), WED("WE", "WED", "WEDNESDAY"), THU(
"TH", "THU", "THURSDAY"), FRI("FR", "FRI", "FRIDAY"), SAT("SA", "SAT", "SATURDAY"), SUN(
@@ -201,4 +225,5 @@ public boolean matches(String dayOfWeek) {
return fullName.equalsIgnoreCase(dayOfWeek);
}
}
+
}
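The switch above re-derives Calendar's 1=Sunday..7=Saturday numbering from java.time's DayOfWeek (1=Monday..7=Sunday). That mapping, plus the unchanged days-to-add logic, in a standalone sketch:

```java
import java.time.DayOfWeek;
import java.time.LocalDate;

public class NextDaySketch {
    // Calendar-style day-of-week (1=Sunday .. 7=Saturday) from java.time's DayOfWeek.
    static int calendarDayOfWeek(DayOfWeek dow) {
        return dow.getValue() % 7 + 1;
    }

    // next_day: first date strictly after d whose day-of-week equals dayOfWeek (Calendar-style).
    static LocalDate nextDay(LocalDate d, int dayOfWeek) {
        int curr = calendarDayOfWeek(d.getDayOfWeek());
        int daysToAdd = curr < dayOfWeek ? dayOfWeek - curr : 7 - curr + dayOfWeek;
        return d.plusDays(daysToAdd);
    }

    public static void main(String[] args) {
        // 2015-01-14 is a Wednesday; the next Tuesday (Calendar value 3) is 2015-01-20,
        // matching the example in the UDF's @Description.
        System.out.println(nextDay(LocalDate.parse("2015-01-14"), 3)); // 2015-01-20
    }
}
```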
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
index e1673b2c9f..1a80782b65 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java
@@ -18,20 +18,20 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -58,7 +58,7 @@
protected transient Converter dt1Converter;
protected transient Converter dt2Converter;
- protected transient DateWritable dateResult = new DateWritable();
+ protected transient DateWritableV2 dateResult = new DateWritableV2();
protected transient TimestampWritable timestampResult = new TimestampWritable();
protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
new HiveIntervalYearMonthWritable();
@@ -222,7 +222,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
}
}
- protected DateWritable handleDateResult(Date result) {
+ protected DateWritableV2 handleDateResult(Date result) {
if (result == null) {
return null;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
index a57b373983..45b12408af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java
@@ -18,20 +18,20 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -57,7 +57,7 @@
protected transient Converter dtConverter;
protected transient TimestampWritable timestampResult = new TimestampWritable();
- protected transient DateWritable dateResult = new DateWritable();
+ protected transient DateWritableV2 dateResult = new DateWritableV2();
protected transient HiveIntervalDayTimeWritable intervalDayTimeResult =
new HiveIntervalDayTimeWritable();
protected transient HiveIntervalYearMonthWritable intervalYearMonthResult =
@@ -217,7 +217,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
}
}
- protected DateWritable handleDateResult(Date result) {
+ protected DateWritableV2 handleDateResult(Date result) {
if (result == null) {
return null;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
index 24068684f3..a538d95b31 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFQuarter.java
@@ -21,9 +21,7 @@
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.VOID_GROUP;
-import java.util.Calendar;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -45,7 +43,6 @@
public class GenericUDFQuarter extends GenericUDF {
private transient Converter[] converters = new Converter[1];
private transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
- private final Calendar calendar = Calendar.getInstance();
private final IntWritable output = new IntWritable();
@Override
@@ -65,8 +62,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
if (date == null) {
return null;
}
- calendar.setTime(date);
- int month = calendar.get(Calendar.MONTH);
+ int month = date.getLocalDate().getMonth().ordinal();
int quarter = (month + 3) / 3;
output.set(quarter);
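Same arithmetic as before, just sourced from LocalDate: with a 0-based month index the quarter is (month + 3) / 3, and with the 1-based getMonthValue() it is (month + 2) / 3. A two-line check:

```java
import java.time.LocalDate;

public class QuarterSketch {
    public static void main(String[] args) {
        LocalDate d = LocalDate.parse("2015-04-08");
        int month0 = d.getMonth().ordinal();              // 0-based, like Calendar.MONTH
        System.out.println((month0 + 3) / 3);             // 2
        System.out.println((d.getMonthValue() + 2) / 3);  // 2
    }
}
```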
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
index f0fcf69856..4ddcef62b0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect2.java
@@ -20,9 +20,9 @@
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
-import java.sql.Timestamp;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
index 53dfae2d7f..3c3796e8a6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToUnixTimeStamp.java
@@ -18,12 +18,12 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
+import java.util.TimeZone;
-import org.apache.calcite.util.TimestampWithTimeZoneString;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
@@ -41,12 +41,10 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
/**
* deterministic version of UDFUnixTimeStamp. enforces argument
@@ -84,6 +82,8 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx
}
}
+ formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+
PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector) arguments[0];
switch (arg1OI.getPrimitiveCategory()) {
case CHAR:
@@ -171,7 +171,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
}
protected static void setValueFromTs(LongWritable value, Timestamp timestamp) {
- value.set(timestamp.getTime() / 1000);
+ value.set(timestamp.toEpochSecond());
}
@Override
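setValueFromTs now reads whole epoch seconds directly instead of dividing millis by a thousand; for non-negative instants the two agree, as this small check shows (the sample instant is an assumption):

```java
import java.time.Instant;

public class EpochSecondsSketch {
    public static void main(String[] args) {
        long epochMillis = 1428494400000L; // 2015-04-08T12:00:00Z
        Instant instant = Instant.ofEpochMilli(epochMillis);
        System.out.println(epochMillis / 1000);       // 1428494400
        System.out.println(instant.getEpochSecond()); // 1428494400
    }
}
```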
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
index 372db36f37..77e39f070d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTrunc.java
@@ -20,20 +20,17 @@
package org.apache.hadoop.hive.ql.udf.generic;
import java.math.BigDecimal;
-import java.sql.Timestamp;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -77,7 +74,6 @@
+ " > SELECT _FUNC_(1234567891.1234567891);\n" + "OK\n" + " 1234567891")
public class GenericUDFTrunc extends GenericUDF {
- private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
private transient TimestampConverter timestampConverter;
private transient Converter textConverter1;
private transient Converter textConverter2;
@@ -88,7 +84,7 @@
private transient Converter longConverter;
private transient PrimitiveCategory inputType1;
private transient PrimitiveCategory inputType2;
- private final Calendar calendar = Calendar.getInstance();
+ private final Date date = new Date();
private final Text output = new Text();
private transient String fmtInput;
private transient PrimitiveObjectInspector inputOI;
@@ -297,36 +293,35 @@ private Object evaluateDate(DeferredObject[] arguments) throws UDFArgumentLength
fmtInput = textConverter2.convert(arguments[1].get()).toString();
}
- Date date;
+ Date d;
switch (inputType1) {
case STRING:
String dateString = textConverter1.convert(arguments[0].get()).toString();
try {
- date = formatter.parse(dateString.toString());
- } catch (ParseException e) {
+ d = Date.valueOf(dateString.toString());
+ } catch (IllegalArgumentException e) {
return null;
}
break;
case TIMESTAMP:
Timestamp ts =
((TimestampWritable) timestampConverter.convert(arguments[0].get())).getTimestamp();
- date = ts;
+ d = Date.ofEpochMilli(ts.toEpochMilli());
break;
case DATE:
- DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
- date = dw.get();
+ DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get());
+ d = dw.get();
break;
default:
throw new UDFArgumentTypeException(0,
"TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
}
- if (evalDate(date) == null) {
+ if (evalDate(d) == null) {
return null;
}
- Date newDate = calendar.getTime();
- output.set(formatter.format(newDate));
+ output.set(date.toString());
return output;
}
@@ -427,22 +422,22 @@ public String getDisplayString(String[] children) {
return getStandardDisplayString("trunc", children);
}
- private Calendar evalDate(Date d) throws UDFArgumentException {
- calendar.setTime(d);
+ private Date evalDate(Date d) throws UDFArgumentException {
+ date.setTimeInDays(d.toEpochDay());
if ("MONTH".equals(fmtInput) || "MON".equals(fmtInput) || "MM".equals(fmtInput)) {
- calendar.set(Calendar.DAY_OF_MONTH, 1);
- return calendar;
+ date.setDayOfMonth(1);
+ return date;
} else if ("QUARTER".equals(fmtInput) || "Q".equals(fmtInput)) {
- int month = calendar.get(Calendar.MONTH);
+ int month = date.getLocalDate().getMonth().ordinal();
int quarter = month / 3;
- int monthToSet = quarter * 3;
- calendar.set(Calendar.MONTH, monthToSet);
- calendar.set(Calendar.DAY_OF_MONTH, 1);
- return calendar;
+ int monthToSet = quarter * 3 + 1;
+ date.setMonth(monthToSet);
+ date.setDayOfMonth(1);
+ return date;
} else if ("YEAR".equals(fmtInput) || "YYYY".equals(fmtInput) || "YY".equals(fmtInput)) {
- calendar.set(Calendar.MONTH, 0);
- calendar.set(Calendar.DAY_OF_MONTH, 1);
- return calendar;
+ date.setMonth(1);
+ date.setDayOfMonth(1);
+ return date;
} else {
return null;
}
@@ -485,5 +480,5 @@ protected BigDecimal trunc(BigDecimal input, int scale) {
}
return output;
}
-
+
}
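evalDate's Calendar mutations become day/month setters; the effect is simply snapping to the first day of the month, quarter, or year. A hedged standalone version of that truncation logic:

```java
import java.time.LocalDate;

public class TruncDateSketch {
    // trunc(date, fmt): snap to the first day of the month, quarter, or year;
    // unsupported formats yield null, mirroring the UDF.
    static LocalDate trunc(LocalDate d, String fmt) {
        switch (fmt) {
            case "MM": case "MON": case "MONTH":
                return d.withDayOfMonth(1);
            case "Q": case "QUARTER": {
                int firstMonthOfQuarter = ((d.getMonthValue() - 1) / 3) * 3 + 1;
                return LocalDate.of(d.getYear(), firstMonthOfQuarter, 1);
            }
            case "YY": case "YYYY": case "YEAR":
                return d.withDayOfYear(1);
            default:
                return null;
        }
    }

    public static void main(String[] args) {
        LocalDate d = LocalDate.parse("2015-08-17");
        System.out.println(trunc(d, "MM"));   // 2015-08-01
        System.out.println(trunc(d, "Q"));    // 2015-07-01
        System.out.println(trunc(d, "YYYY")); // 2015-01-01
    }
}
```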
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
index 832983105f..557ab792ea 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
@@ -20,6 +20,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -44,7 +45,7 @@ protected void initializeInput(ObjectInspector[] arguments) throws UDFArgumentEx
} else {
if (currentTimestamp == null) {
currentTimestamp = new LongWritable(0);
- setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp());
+ setValueFromTs(currentTimestamp, Timestamp.ofEpochMilli(SessionState.get().getQueryCurrentTimestamp().getTime()));
String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead.";
SessionState.getConsole().printInfo(msg, false);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
index b440d8d848..b34c4d63d4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/ValueBoundaryScanner.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.hive.ql.udf.ptf;
-import java.sql.Timestamp;
-import java.util.Date;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.ql.exec.PTFPartition;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -559,7 +558,7 @@ public boolean isDistanceGreater(Object v1, Object v2, int amt) {
Date l2 = PrimitiveObjectInspectorUtils.getDate(v2,
(PrimitiveObjectInspector) expressionDef.getOI());
if (l1 != null && l2 != null) {
- return (double)(l1.getTime() - l2.getTime())/1000 > (long)amt * 24 * 3600; // Converts amt days to milliseconds
+ return (double)(l1.toEpochMilli() - l2.toEpochMilli())/1000 > (long)amt * 24 * 3600; // Converts amt days to seconds
}
return l1 != l2; // True if only one date is null
}
@@ -583,9 +582,9 @@ public TimestampValueBoundaryScanner(BoundaryDef start, BoundaryDef end, OrderEx
public boolean isDistanceGreater(Object v1, Object v2, int amt) {
if (v1 != null && v2 != null) {
long l1 = PrimitiveObjectInspectorUtils.getTimestamp(v1,
- (PrimitiveObjectInspector) expressionDef.getOI()).getTime();
+ (PrimitiveObjectInspector) expressionDef.getOI()).toEpochMilli();
long l2 = PrimitiveObjectInspectorUtils.getTimestamp(v2,
- (PrimitiveObjectInspector) expressionDef.getOI()).getTime();
+ (PrimitiveObjectInspector) expressionDef.getOI()).toEpochMilli();
return (double)(l1-l2)/1000 > amt; // TODO: lossy conversion, distance is considered in seconds
}
return v1 != null || v2 != null; // True if only one value is null
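Both boundary scanners compare a millisecond difference, scaled to seconds, against a threshold expressed in seconds. The check in isolation:

```java
import java.time.Instant;
import java.time.temporal.ChronoUnit;

public class WindowDistanceSketch {
    // Is the gap between two instants greater than amt days (expressed in seconds)?
    static boolean isDistanceGreater(Instant t1, Instant t2, int amtDays) {
        double diffSeconds = (t1.toEpochMilli() - t2.toEpochMilli()) / 1000.0;
        return diffSeconds > (long) amtDays * 24 * 3600;
    }

    public static void main(String[] args) {
        Instant base = Instant.parse("2015-01-01T00:00:00Z");
        System.out.println(isDistanceGreater(base.plus(3, ChronoUnit.DAYS), base, 2)); // true
        System.out.println(isDistanceGreater(base.plus(1, ChronoUnit.DAYS), base, 2)); // false
    }
}
```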
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
index 9a097afd56..16babbf7a7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
@@ -17,17 +17,17 @@
*/
package org.apache.hadoop.hive.ql.util;
-import java.sql.Date;
-import java.sql.Timestamp;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hive.common.util.DateUtils;
+
import java.util.Calendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hive.common.util.DateUtils;
-
public class DateTimeMath {
@@ -49,7 +49,6 @@ public void addNanos(int leftNanos, int rightNanos) {
}
protected Calendar calUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
- protected Calendar calLocal = Calendar.getInstance();
protected NanosResult nanosResult = new NanosResult();
//
@@ -62,39 +61,22 @@ public void addNanos(int leftNanos, int rightNanos) {
* @param months
* @return
*/
- public long addMonthsToMillisUtc(long millis, int months) {
+ public long addMonthsToMillis(long millis, int months) {
calUtc.setTimeInMillis(millis);
calUtc.add(Calendar.MONTH, months);
return calUtc.getTimeInMillis();
}
- /**
- * Perform month arithmetic to millis value using local time zone.
- * @param millis
- * @param months
- * @return
- */
- public long addMonthsToMillisLocal(long millis, int months) {
- calLocal.setTimeInMillis(millis);
- calLocal.add(Calendar.MONTH, months);
- return calLocal.getTimeInMillis();
- }
-
- public long addMonthsToNanosUtc(long nanos, int months) {
- long result = addMonthsToMillisUtc(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
- return result;
- }
-
- public long addMonthsToNanosLocal(long nanos, int months) {
- long result = addMonthsToMillisLocal(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
+ public long addMonthsToNanos(long nanos, int months) {
+ long result = addMonthsToMillis(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
return result;
}
public long addMonthsToDays(long days, int months) {
- long millis = DateWritable.daysToMillis((int) days);
- millis = addMonthsToMillisLocal(millis, months);
+ long millis = DateWritableV2.daysToMillis((int) days);
+ millis = addMonthsToMillis(millis, months);
// Convert millis result back to days
- return DateWritable.millisToDays(millis);
+ return DateWritableV2.millisToDays(millis);
}
public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
@@ -102,7 +84,19 @@ public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
return null;
}
- Timestamp tsResult = new Timestamp(0);
+ Timestamp tsResult = new Timestamp();
+ add(ts, interval, tsResult);
+
+ return tsResult;
+ }
+
+ @Deprecated
+ public java.sql.Timestamp add(java.sql.Timestamp ts, HiveIntervalYearMonth interval) {
+ if (ts == null || interval == null) {
+ return null;
+ }
+
+ java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
add(ts, interval, tsResult);
return tsResult;
@@ -113,9 +107,21 @@ public boolean add(Timestamp ts, HiveIntervalYearMonth interval, Timestamp resul
return false;
}
+ long resultMillis = addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths());
+ result.setTimeInMillis(resultMillis, ts.getNanos());
+
+ return true;
+ }
+
+ @Deprecated
+ public boolean add(java.sql.Timestamp ts, HiveIntervalYearMonth interval, java.sql.Timestamp result) {
+ if (ts == null || interval == null) {
+ return false;
+ }
+
// Attempt to match Oracle semantics for timestamp arithmetic,
// where timestamp arithmetic is done in UTC, then converted back to local timezone
- long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+ long resultMillis = addMonthsToMillis(ts.getTime(), interval.getTotalMonths());
result.setTime(resultMillis);
result.setNanos(ts.getNanos());
@@ -127,7 +133,19 @@ public Timestamp add(HiveIntervalYearMonth interval, Timestamp ts) {
return null;
}
- Timestamp tsResult = new Timestamp(0);
+ Timestamp tsResult = new Timestamp();
+ add(interval, ts, tsResult);
+
+ return tsResult;
+ }
+
+ @Deprecated
+ public java.sql.Timestamp add(HiveIntervalYearMonth interval, java.sql.Timestamp ts) {
+ if (ts == null || interval == null) {
+ return null;
+ }
+
+ java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
add(interval, ts, tsResult);
return tsResult;
@@ -138,9 +156,19 @@ public boolean add(HiveIntervalYearMonth interval, Timestamp ts, Timestamp resul
return false;
}
- // Attempt to match Oracle semantics for timestamp arithmetic,
- // where timestamp arithmetic is done in UTC, then converted back to local timezone
- long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+ long resultMillis = addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths());
+ result.setTimeInMillis(resultMillis, ts.getNanos());
+
+ return true;
+ }
+
+ @Deprecated
+ public boolean add(HiveIntervalYearMonth interval, java.sql.Timestamp ts, java.sql.Timestamp result) {
+ if (ts == null || interval == null) {
+ return false;
+ }
+
+ long resultMillis = addMonthsToMillis(ts.getTime(), interval.getTotalMonths());
result.setTime(resultMillis);
result.setNanos(ts.getNanos());
@@ -152,7 +180,19 @@ public Date add(Date dt, HiveIntervalYearMonth interval) {
return null;
}
- Date dtResult = new Date(0);
+ Date dtResult = new Date();
+ add(dt, interval, dtResult);
+
+ return dtResult;
+ }
+
+ @Deprecated
+ public java.sql.Date add(java.sql.Date dt, HiveIntervalYearMonth interval) {
+ if (dt == null || interval == null) {
+ return null;
+ }
+
+ java.sql.Date dtResult = new java.sql.Date(0);
add(dt, interval, dtResult);
return dtResult;
@@ -163,9 +203,18 @@ public boolean add(Date dt, HiveIntervalYearMonth interval, Date result) {
return false;
}
- // Since Date millis value is in local timezone representation, do date arithmetic
- // using local timezone so the time remains at the start of the day.
- long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+ long resultMillis = addMonthsToMillis(dt.toEpochMilli(), interval.getTotalMonths());
+ result.setTimeInMillis(resultMillis);
+ return true;
+ }
+
+ @Deprecated
+ public boolean add(java.sql.Date dt, HiveIntervalYearMonth interval, java.sql.Date result) {
+ if (dt == null || interval == null) {
+ return false;
+ }
+
+ long resultMillis = addMonthsToMillis(dt.getTime(), interval.getTotalMonths());
result.setTime(resultMillis);
return true;
}
@@ -175,7 +224,19 @@ public Date add(HiveIntervalYearMonth interval, Date dt) {
return null;
}
- Date dtResult = new Date(0);
+ Date dtResult = new Date();
+ add(interval, dt, dtResult);
+
+ return dtResult;
+ }
+
+ @Deprecated
+ public java.sql.Date add(HiveIntervalYearMonth interval, java.sql.Date dt) {
+ if (dt == null || interval == null) {
+ return null;
+ }
+
+ java.sql.Date dtResult = new java.sql.Date(0);
add(interval, dt, dtResult);
return dtResult;
@@ -186,9 +247,18 @@ public boolean add(HiveIntervalYearMonth interval, Date dt, Date result) {
return false;
}
- // Since Date millis value is in local timezone representation, do date arithmetic
- // using local timezone so the time remains at the start of the day.
- long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+ long resultMillis = addMonthsToMillis(dt.toEpochMilli(), interval.getTotalMonths());
+ result.setTimeInMillis(resultMillis);
+ return true;
+ }
+
+ @Deprecated
+ public boolean add(HiveIntervalYearMonth interval, java.sql.Date dt, java.sql.Date result) {
+ if (dt == null || interval == null) {
+ return false;
+ }
+
+ long resultMillis = addMonthsToMillis(dt.getTime(), interval.getTotalMonths());
result.setTime(resultMillis);
return true;
}
@@ -208,7 +278,19 @@ public Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) {
return null;
}
- Timestamp tsResult = new Timestamp(0);
+ Timestamp tsResult = new Timestamp();
+ subtract(left, right, tsResult);
+
+ return tsResult;
+ }
+
+ @Deprecated
+ public java.sql.Timestamp subtract(java.sql.Timestamp left, HiveIntervalYearMonth right) {
+ if (left == null || right == null) {
+ return null;
+ }
+
+ java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
subtract(left, right, tsResult);
return tsResult;
@@ -221,12 +303,32 @@ public boolean subtract(Timestamp left, HiveIntervalYearMonth right, Timestamp r
return add(left, right.negate(), result);
}
+ @Deprecated
+ public boolean subtract(java.sql.Timestamp left, HiveIntervalYearMonth right, java.sql.Timestamp result) {
+ if (left == null || right == null) {
+ return false;
+ }
+ return add(left, right.negate(), result);
+ }
+
public Date subtract(Date left, HiveIntervalYearMonth right) {
if (left == null || right == null) {
return null;
}
- Date dtResult = new Date(0);
+ Date dtResult = new Date();
+ subtract(left, right, dtResult);
+
+ return dtResult;
+ }
+
+ @Deprecated
+ public java.sql.Date subtract(java.sql.Date left, HiveIntervalYearMonth right) {
+ if (left == null || right == null) {
+ return null;
+ }
+
+ java.sql.Date dtResult = new java.sql.Date(0);
subtract(left, right, dtResult);
return dtResult;
@@ -239,6 +341,14 @@ public boolean subtract(Date left, HiveIntervalYearMonth right, Date result) {
return add(left, right.negate(), result);
}
+ @Deprecated
+ public boolean subtract(java.sql.Date left, HiveIntervalYearMonth right, java.sql.Date result) {
+ if (left == null || right == null) {
+ return false;
+ }
+ return add(left, right.negate(), result);
+ }
+
public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
if (left == null || right == null) {
return null;
@@ -255,7 +365,19 @@ public Timestamp add(Timestamp ts, HiveIntervalDayTime interval) {
return null;
}
- Timestamp tsResult = new Timestamp(0);
+ Timestamp tsResult = new Timestamp();
+ add(ts, interval, tsResult);
+
+ return tsResult;
+ }
+
+ @Deprecated
+ public java.sql.Timestamp add(java.sql.Timestamp ts, HiveIntervalDayTime interval) {
+ if (ts == null || interval == null) {
+ return null;
+ }
+
+ java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
add(ts, interval, tsResult);
return tsResult;
@@ -269,6 +391,21 @@ public boolean add(Timestamp ts, HiveIntervalDayTime interval,
nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+ long newMillis = ts.toEpochMilli()
+ + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+ result.setTimeInMillis(newMillis, nanosResult.nanos);
+ return true;
+ }
+
+ @Deprecated
+ public boolean add(java.sql.Timestamp ts, HiveIntervalDayTime interval,
+ java.sql.Timestamp result) {
+ if (ts == null || interval == null) {
+ return false;
+ }
+
+ nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
long newMillis = ts.getTime()
+ TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
result.setTime(newMillis);
@@ -281,7 +418,18 @@ public Timestamp add(HiveIntervalDayTime interval, Timestamp ts) {
return null;
}
- Timestamp tsResult = new Timestamp(0);
+ Timestamp tsResult = new Timestamp();
+ add(interval, ts, tsResult);
+ return tsResult;
+ }
+
+ @Deprecated
+ public java.sql.Timestamp add(HiveIntervalDayTime interval, java.sql.Timestamp ts) {
+ if (ts == null || interval == null) {
+ return null;
+ }
+
+ java.sql.Timestamp tsResult = new java.sql.Timestamp(0);
add(interval, ts, tsResult);
return tsResult;
}
@@ -294,6 +442,21 @@ public boolean add(HiveIntervalDayTime interval, Timestamp ts,
nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+ long newMillis = ts.toEpochMilli()
+ + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+ result.setTimeInMillis(newMillis, nanosResult.nanos);
+ return true;
+ }
+
+ @Deprecated
+ public boolean add(HiveIntervalDayTime interval, java.sql.Timestamp ts,
+ java.sql.Timestamp result) {
+ if (ts == null || interval == null) {
+ return false;
+ }
+
+ nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
long newMillis = ts.getTime()
+ TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
result.setTime(newMillis);
@@ -332,6 +495,14 @@ public Timestamp subtract(Timestamp left, HiveIntervalDayTime right) {
return add(left, right.negate());
}
+ @Deprecated
+ public java.sql.Timestamp subtract(java.sql.Timestamp left, HiveIntervalDayTime right) {
+ if (left == null || right == null) {
+ return null;
+ }
+ return add(left, right.negate());
+ }
+
public boolean subtract(Timestamp left, HiveIntervalDayTime right, Timestamp result) {
if (left == null || right == null) {
return false;
@@ -339,6 +510,14 @@ public boolean subtract(Timestamp left, HiveIntervalDayTime right, Timestamp res
return add(left, right.negate(), result);
}
+ @Deprecated
+ public boolean subtract(java.sql.Timestamp left, HiveIntervalDayTime right, java.sql.Timestamp result) {
+ if (left == null || right == null) {
+ return false;
+ }
+ return add(left, right.negate(), result);
+ }
+
public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
if (left == null || right == null) {
return null;
@@ -365,6 +544,18 @@ public HiveIntervalDayTime subtract(Timestamp left, Timestamp right) {
return result;
}
+ @Deprecated
+ public HiveIntervalDayTime subtract(java.sql.Timestamp left, java.sql.Timestamp right) {
+ if (left == null || right == null) {
+ return null;
+ }
+
+ HiveIntervalDayTime result = new HiveIntervalDayTime();
+ subtract(left, right, result);
+
+ return result;
+ }
+
public boolean subtract(Timestamp left, Timestamp right,
HiveIntervalDayTime result) {
if (left == null || right == null) {
@@ -373,6 +564,21 @@ public boolean subtract(Timestamp left, Timestamp right,
nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
+ long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.toEpochMilli())
+ - TimeUnit.MILLISECONDS.toSeconds(right.toEpochMilli()) + nanosResult.seconds;
+ result.set(totalSeconds, nanosResult.nanos);
+ return true;
+ }
+
+ @Deprecated
+ public boolean subtract(java.sql.Timestamp left, java.sql.Timestamp right,
+ HiveIntervalDayTime result) {
+ if (left == null || right == null) {
+ return false;
+ }
+
+ nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
+
long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
- TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
result.set(totalSeconds, nanosResult.nanos);
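
The DateTimeMath hunks above swap java.sql arithmetic (getTime/setTime) for the Hive common types' epoch-milli accessors (toEpochMilli/setTimeInMillis), keeping the java.sql overloads only as @Deprecated bridges. A minimal sketch of the new pattern, assuming the accessors shown in these hunks and a hypothetical addMonthsToMillis helper standing in for DateTimeMath's internal calendar logic:

```java
import java.util.Calendar;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.Timestamp;

public class IntervalAddSketch {
  private final Calendar calUtc =
      Calendar.getInstance(java.util.TimeZone.getTimeZone("UTC"));

  // Hypothetical helper: shift an epoch-millis value by a number of months.
  private long addMonthsToMillis(long millis, int months) {
    calUtc.setTimeInMillis(millis);
    calUtc.add(Calendar.MONTH, months);
    return calUtc.getTimeInMillis();
  }

  public boolean add(Timestamp ts, HiveIntervalYearMonth interval, Timestamp result) {
    if (ts == null || interval == null) {
      return false;
    }
    // New style: epoch millis come from toEpochMilli() instead of java.sql getTime(),
    // and the result is written back with setTimeInMillis().
    result.setTimeInMillis(addMonthsToMillis(ts.toEpochMilli(), interval.getTotalMonths()));
    return true;
  }
}
```
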
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
index 613d7a8d28..f7cd4c61ec 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java
@@ -28,18 +28,16 @@
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
-import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.udf.UDFAscii;
import org.apache.hadoop.hive.ql.udf.UDFLn;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCurrentTimestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFExplode;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -62,10 +60,10 @@ public void same(Text x, Text y) {}
public void one(IntWritable x, HiveDecimalWritable y) {}
public void one(IntWritable x, DoubleWritable y) {}
public void one(IntWritable x, IntWritable y) {}
- public void mismatch(DateWritable x, HiveDecimalWritable y) {}
+ public void mismatch(DateWritableV2 x, HiveDecimalWritable y) {}
public void mismatch(TimestampWritable x, HiveDecimalWritable y) {}
public void mismatch(BytesWritable x, DoubleWritable y) {}
- public void typeaffinity1(DateWritable x) {}
+ public void typeaffinity1(DateWritableV2 x) {}
public void typeaffinity1(DoubleWritable x) {};
public void typeaffinity1(Text x) {}
public void typeaffinity2(IntWritable x) {}
@@ -158,8 +156,8 @@ public void testTypeAffinity() {
typeAffinity("typeaffinity1", TypeInfoFactory.floatTypeInfo, 1, DoubleWritable.class);
// Prefer date type arguments over other method signatures
- typeAffinity("typeaffinity1", TypeInfoFactory.dateTypeInfo, 1, DateWritable.class);
- typeAffinity("typeaffinity1", TypeInfoFactory.timestampTypeInfo, 1, DateWritable.class);
+ typeAffinity("typeaffinity1", TypeInfoFactory.dateTypeInfo, 1, DateWritableV2.class);
+ typeAffinity("typeaffinity1", TypeInfoFactory.timestampTypeInfo, 1, DateWritableV2.class);
// String type affinity
typeAffinity("typeaffinity1", TypeInfoFactory.stringTypeInfo, 1, Text.class);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
index c8ae73a21d..f29fede1d1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestRowContainer.java
@@ -19,12 +19,12 @@
import static org.junit.Assert.assertEquals;
import java.io.IOException;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -55,7 +55,7 @@ public void testSpillTimestamp() throws HiveException, SerDeException, IOExcepti
ObjectInspectorUtils.getStandardObjectInspector(serde.getObjectInspector()));
result.setTableDesc(
PTFRowContainer.createTableDesc((StructObjectInspector) serde.getObjectInspector()));
- TimestampWritable key = new TimestampWritable(new Timestamp(10));
+ TimestampWritable key = new TimestampWritable(Timestamp.ofEpochMilli(10));
result.setKeyObject(Lists.newArrayList(key));
List row;
// will trigger 2 spills
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
index f163289f51..6fd8e0957f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestTimestampWritableAndColumnVector.java
@@ -23,7 +23,7 @@
import java.sql.Timestamp;
import java.util.Random;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
import static org.junit.Assert.*;
@@ -45,7 +45,7 @@ public void testDouble() throws Exception {
Timestamp[] randTimestamps = new Timestamp[VectorizedRowBatch.DEFAULT_SIZE];
for (int i = 0; i < VectorizedRowBatch.DEFAULT_SIZE; i++) {
- Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
+ Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r).toSqlTimestamp();
randTimestamps[i] = randTimestamp;
timestampColVector.set(i, randTimestamp);
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
index fa5c775a98..1768917cbd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorRandomRowSource.java
@@ -32,7 +32,7 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
index ec5ad2327d..9ad2804ef3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/VectorVerifyFast.java
@@ -19,15 +19,17 @@
package org.apache.hadoop.hive.ql.exec.vector;
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -56,8 +58,6 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -240,7 +240,7 @@ public static void doVerifyDeserializeRead(
case DATE:
{
Date value = deserializeRead.currentDateWritable.get();
- Date expected = ((DateWritable) object).get();
+ Date expected = ((DateWritableV2) object).get();
if (!value.equals(expected)) {
TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
@@ -390,7 +390,7 @@ public static void serializeWrite(SerializeWrite serializeWrite,
break;
case DATE:
{
- Date value = ((DateWritable) object).get();
+ Date value = ((DateWritableV2) object).get();
serializeWrite.writeDate(value);
}
break;
@@ -567,7 +567,7 @@ private static Object doReadComplexPrimitiveField(DeserializeRead deserializeRea
case DECIMAL:
return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
case DATE:
- return new DateWritable(deserializeRead.currentDateWritable);
+ return new DateWritableV2(deserializeRead.currentDateWritable);
case TIMESTAMP:
return new TimestampWritable(deserializeRead.currentTimestampWritable);
case INTERVAL_YEAR_MONTH:
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
index ffe9c81a50..aeb332ba96 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorDateExpressions.java
@@ -25,11 +25,8 @@
import org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
-import org.apache.hadoop.hive.ql.udf.UDFMonth;
import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
-import org.apache.hadoop.hive.ql.udf.UDFYear;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -38,11 +35,9 @@
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
-import org.junit.internal.runners.statements.Fail;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
@@ -51,7 +46,6 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
-import java.util.concurrent.ThreadFactory;
public class TestVectorDateExpressions {
@@ -59,8 +53,9 @@
/* copied over from VectorUDFTimestampFieldLong */
private TimestampWritable toTimestampWritable(long daysSinceEpoch) {
- Timestamp ts = new Timestamp(DateWritable.daysToMillis((int) daysSinceEpoch));
- return new TimestampWritable(ts);
+ return new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
+ DateWritableV2.daysToMillis((int) daysSinceEpoch)));
}
private int[] getAllBoundaries() {
@@ -109,10 +104,9 @@ private VectorizedRowBatch getVectorizedRowBatch(int[] inputs, int size) {
}
private void compareToUDFYearDate(long t, int y) {
- UDFYear udf = new UDFYear();
TimestampWritable tsw = toTimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ int res = tsw.getTimestamp().getYear();
+ Assert.assertEquals(res, y);
}
private void verifyUDFYear(VectorizedRowBatch batch) throws HiveException {
@@ -171,10 +165,9 @@ public void testVectorUDFYear() throws HiveException {
}
private void compareToUDFDayOfMonthDate(long t, int y) {
- UDFDayOfMonth udf = new UDFDayOfMonth();
TimestampWritable tsw = toTimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ int res = tsw.getTimestamp().getDay();
+ Assert.assertEquals(res, y);
}
private void verifyUDFDayOfMonth(VectorizedRowBatch batch) throws HiveException {
@@ -233,10 +226,9 @@ public void testVectorUDFDayOfMonth() throws HiveException {
}
private void compareToUDFMonthDate(long t, int y) {
- UDFMonth udf = new UDFMonth();
TimestampWritable tsw = toTimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ int res = tsw.getTimestamp().getMonth();
+ Assert.assertEquals(res, y);
}
private void verifyUDFMonth(VectorizedRowBatch batch) throws HiveException {
@@ -309,7 +301,7 @@ private void compareToUDFUnixTimeStampDate(long t, long y) {
LongWritable res = getLongWritable(tsw);
if(res.get() != y) {
System.out.printf("%d vs %d for %d, %d\n", res.get(), y, t,
- tsw.getTimestamp().getTime()/1000);
+ tsw.getTimestamp().toEpochMilli()/1000);
}
Assert.assertEquals(res.get(), y);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
index fe3c91cab4..07c247e553 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
@@ -84,7 +84,8 @@ private Writable getWritableValue(TypeInfo ti, double value) {
private Writable getWritableValue(TypeInfo ti, Timestamp value) {
- return new TimestampWritable(value);
+ return new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(value.getTime(), value.getNanos()));
}
private Writable getWritableValue(TypeInfo ti, HiveDecimal value) {
@@ -116,7 +117,8 @@ private Writable getWritableValue(TypeInfo ti, long value) {
return new BooleanWritable( value == 0 ? false : true);
} else if (ti.equals(TypeInfoFactory.timestampTypeInfo)) {
Timestamp ts = new Timestamp(value);
- TimestampWritable tw = new TimestampWritable(ts);
+ TimestampWritable tw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
return tw;
}
return null;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
index 9792951f7e..757299234c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -88,7 +88,7 @@ private TimestampColumnVector toTimestamp(LongColumnVector date) {
}
private Timestamp toTimestamp(long date) {
- return new Timestamp(DateWritable.daysToMillis((int) date));
+ return new Timestamp(DateWritableV2.daysToMillis((int) date));
}
private BytesColumnVector toString(LongColumnVector date) {
@@ -107,7 +107,7 @@ private BytesColumnVector toString(LongColumnVector date) {
}
private byte[] toString(long date) {
- String formatted = formatter.format(new Date(DateWritable.daysToMillis((int) date)));
+ String formatted = formatter.format(new Date(DateWritableV2.daysToMillis((int) date)));
return formatted.getBytes(utf8);
}
@@ -668,7 +668,7 @@ private void validateDate(VectorizedRowBatch batch, PrimitiveCategory colType,
if (date.isNull[i]) {
Assert.assertTrue(output.isNull[i]);
} else {
- String expected = formatter.format(new Date(DateWritable.daysToMillis((int) date.vector[i])));
+ String expected = formatter.format(new Date(DateWritableV2.daysToMillis((int) date.vector[i])));
Assert.assertEquals(expected, actual);
}
}
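
In the generic date expression tests, day-granularity inputs are plain integers (days since epoch) and are mapped to millis through DateWritableV2.daysToMillis before being formatted or compared. A small sketch of that conversion, assuming only the static daysToMillis(int) call visible in the hunks and a yyyy-MM-dd formatter standing in for the test's:

```java
import java.text.SimpleDateFormat;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DaysToStringSketch {
  private static final SimpleDateFormat FORMATTER = new SimpleDateFormat("yyyy-MM-dd");

  // Same conversion the hunks above use for both input generation and validation.
  static String toDateString(long daysSinceEpoch) {
    return FORMATTER.format(new java.util.Date(DateWritableV2.daysToMillis((int) daysSinceEpoch)));
  }

  public static void main(String[] args) {
    System.out.println(toDateString(0));      // 1970-01-01 (in the formatter's default time zone)
    System.out.println(toDateString(18000));  // roughly 2019-04-14, depending on local offset
  }
}
```
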
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
index e81844c899..16be817d28 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
@@ -20,13 +20,13 @@
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.Random;
import junit.framework.Assert;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -288,7 +288,7 @@ public static VectorizedRowBatch getVectorizedRowBatchTimestampInLongOut(long[]
for (int i = 0; i < longValues.length; i++) {
Timestamp randTimestamp = RandomTypeUtil.getRandTimestamp(r);
longValues[i] = TimestampWritable.getLong(randTimestamp);
- inV.set(0, randTimestamp);
+ inV.set(0, randTimestamp.toSqlTimestamp());
}
batch.cols[0] = inV;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
index f6dbd672e9..57c70063bc 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTimestampExpressions.java
@@ -26,13 +26,11 @@
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
-import java.util.List;
import java.util.Random;
import junit.framework.Assert;
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -40,18 +38,11 @@
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.udf.UDFDayOfMonth;
-import org.apache.hadoop.hive.ql.udf.UDFHour;
-import org.apache.hadoop.hive.ql.udf.UDFMinute;
-import org.apache.hadoop.hive.ql.udf.UDFMonth;
-import org.apache.hadoop.hive.ql.udf.UDFSecond;
import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear;
-import org.apache.hadoop.hive.ql.udf.UDFYear;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.junit.Test;
@@ -95,7 +86,7 @@ private VectorizedRowBatch getVectorizedRandomRowBatchTimestampLong(int seed, in
TimestampColumnVector tcv = new TimestampColumnVector(size);
Random rand = new Random(seed);
for (int i = 0; i < size; i++) {
- tcv.set(i, RandomTypeUtil.getRandTimestamp(rand));
+ tcv.set(i, RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp());
}
batch.cols[0] = tcv;
batch.cols[1] = new LongColumnVector(size);
@@ -109,7 +100,7 @@ private VectorizedRowBatch getVectorizedRandomRowBatchStringLong(int seed, int s
Random rand = new Random(seed);
for (int i = 0; i < size; i++) {
/* all 32 bit numbers qualify & multiply up to get nano-seconds */
- byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand));
+ byte[] encoded = encodeTime(RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp());
bcv.vector[i] = encoded;
bcv.start[i] = 0;
bcv.length[i] = encoded.length;
@@ -225,14 +216,14 @@ private Timestamp readVectorElementAt(ColumnVector col, int i) {
}
private void compareToUDFYearLong(Timestamp t, int y) {
- UDFYear udf = new UDFYear();
- TimestampWritable tsw = new TimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- if (res.get() != y) {
- System.out.printf("%d vs %d for %s, %d\n", res.get(), y, t.toString(),
- tsw.getTimestamp().getTime()/1000);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ int res = tsw.getTimestamp().getYear();
+ if (res != y) {
+ System.out.printf("%d vs %d for %s, %d\n", res, y, t.toString(),
+ tsw.getTimestamp().toEpochMilli()/1000);
}
- Assert.assertEquals(res.get(), y);
+ Assert.assertEquals(res, y);
}
private void verifyUDFYear(VectorizedRowBatch batch, TestType testType)
@@ -323,10 +314,10 @@ public void testVectorUDFYearString() throws HiveException {
}
private void compareToUDFDayOfMonthLong(Timestamp t, int y) {
- UDFDayOfMonth udf = new UDFDayOfMonth();
- TimestampWritable tsw = new TimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ int res = tsw.getTimestamp().getDay();
+ Assert.assertEquals(res, y);
}
private void verifyUDFDayOfMonth(VectorizedRowBatch batch, TestType testType)
@@ -410,10 +401,10 @@ public void testVectorUDFDayOfMonthString() throws HiveException {
}
private void compareToUDFHourLong(Timestamp t, int y) {
- UDFHour udf = new UDFHour();
- TimestampWritable tsw = new TimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ int res = tsw.getTimestamp().getHours();
+ Assert.assertEquals(res, y);
}
private void verifyUDFHour(VectorizedRowBatch batch, TestType testType) throws HiveException {
@@ -496,10 +487,10 @@ public void testVectorUDFHourString() throws HiveException {
}
private void compareToUDFMinuteLong(Timestamp t, int y) {
- UDFMinute udf = new UDFMinute();
- TimestampWritable tsw = new TimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ int res = tsw.getTimestamp().getMinutes();
+ Assert.assertEquals(res, y);
}
private void verifyUDFMinute(VectorizedRowBatch batch, TestType testType)
@@ -583,10 +574,10 @@ public void testVectorUDFMinuteString() throws HiveException {
}
private void compareToUDFMonthLong(Timestamp t, int y) {
- UDFMonth udf = new UDFMonth();
- TimestampWritable tsw = new TimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ int res = tsw.getTimestamp().getMonth();
+ Assert.assertEquals(res, y);
}
private void verifyUDFMonth(VectorizedRowBatch batch, TestType testType) throws HiveException {
@@ -669,10 +660,10 @@ public void testVectorUDFMonthString() throws HiveException {
}
private void compareToUDFSecondLong(Timestamp t, int y) {
- UDFSecond udf = new UDFSecond();
- TimestampWritable tsw = new TimestampWritable(t);
- IntWritable res = udf.evaluate(tsw);
- Assert.assertEquals(res.get(), y);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ int res = tsw.getTimestamp().getSeconds();
+ Assert.assertEquals(res, y);
}
private void verifyUDFSecond(VectorizedRowBatch batch, TestType testType) throws HiveException {
@@ -844,7 +835,8 @@ public void testVectorUDFUnixTimeStampString() throws HiveException {
private void compareToUDFWeekOfYearLong(Timestamp t, int y) {
UDFWeekOfYear udf = new UDFWeekOfYear();
- TimestampWritable tsw = new TimestampWritable(t);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
IntWritable res = udf.evaluate(tsw);
Assert.assertEquals(res.get(), y);
}
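
In the timestamp expression tests the per-field UDFs (UDFYear, UDFMonth, UDFHour, ...) are no longer instantiated; the expected value is read directly from the new Hive Timestamp's field accessors. A sketch of that comparison pattern, assuming the accessor names used above return the same 1-based calendar fields the UDFs produced:

```java
import org.apache.hadoop.hive.common.type.Timestamp;

public class TimestampFieldSketch {
  // Reads calendar fields straight off the Hive Timestamp, as the updated helpers do.
  static void checkYearMonthDay(long epochMillis, int nanos, int year, int month, int day) {
    Timestamp ts = Timestamp.ofEpochMilli(epochMillis, nanos);
    if (ts.getYear() != year || ts.getMonth() != month || ts.getDay() != day) {
      throw new AssertionError("field mismatch for " + ts);
    }
  }

  public static void main(String[] args) {
    // 1985-08-30 (UTC) expressed as epoch millis; month is assumed to be 1-based here.
    checkYearMonthDay(494243222000L, 0, 1985, 8, 30);
  }
}
```
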
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index 8499da6d00..8c36f7df4d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -22,19 +22,15 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
-import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
-import java.math.MathContext;
-import java.math.RoundingMode;
import java.sql.Timestamp;
-import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import junit.framework.Assert;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -42,12 +38,10 @@
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
-import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.junit.Test;
@@ -87,7 +81,7 @@ public void testCastDateToTimestamp() throws HiveException {
expr.evaluate(b);
for (int i = 0; i < intValues.length; i++) {
Timestamp timestamp = resultV.asScratchTimestamp(i);
- long actual = DateWritable.millisToDays(timestamp.getTime());
+ long actual = DateWritableV2.millisToDays(timestamp.getTime());
assertEquals(actual, intValues[i]);
}
}
@@ -154,7 +148,8 @@ public void testCastLongToTimestamp() throws HiveException {
expr.evaluate(b);
for (int i = 0; i < longValues.length; i++) {
Timestamp timestamp = resultV.asScratchTimestamp(i);
- long actual = TimestampWritable.getLong(timestamp);
+ long actual = TimestampWritable.getLong(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(timestamp.getTime(), timestamp.getNanos()));
assertEquals(actual, longValues[i]);
}
}
@@ -514,7 +509,8 @@ private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) {
Timestamp ts = new Timestamp(millis);
int nanos = RandomTypeUtil.randomNanos(r);
ts.setNanos(nanos);
- TimestampWritable tsw = new TimestampWritable(ts);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
double asDouble = tsw.getDouble();
doubleValues[i] = asDouble;
HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal(asDouble));
@@ -578,7 +574,8 @@ private VectorizedRowBatch getBatchTimestampDecimal(HiveDecimal[] hiveDecimalVal
long millis = RandomTypeUtil.randomMillis(r);
Timestamp ts = new Timestamp(millis);
ts.setNanos(optionalNanos);
- TimestampWritable tsw = new TimestampWritable(ts);
+ TimestampWritable tsw = new TimestampWritable(
+ org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(ts.getTime(), ts.getNanos()));
hiveDecimalValues[i] = tsw.getHiveDecimal();
tcv.set(i, ts);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
index f257363691..8efbd9959b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/mapjoin/fast/VerifyFastRow.java
@@ -19,8 +19,6 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
@@ -29,15 +27,17 @@
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -63,7 +63,6 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
/**
* (Copy of VerifyFast from serde).
@@ -245,7 +244,7 @@ public static void doVerifyDeserializeRead(DeserializeRead deserializeRead,
case DATE:
{
Date value = deserializeRead.currentDateWritable.get();
- Date expected = ((DateWritable) object).get();
+ Date expected = ((DateWritableV2) object).get();
if (!value.equals(expected)) {
TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
@@ -395,7 +394,7 @@ public static void serializeWrite(SerializeWrite serializeWrite,
break;
case DATE:
{
- Date value = ((DateWritable) object).get();
+ Date value = ((DateWritableV2) object).get();
serializeWrite.writeDate(value);
}
break;
@@ -572,7 +571,7 @@ private static Object doReadComplexPrimitiveField(DeserializeRead deserializeRea
case DECIMAL:
return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
case DATE:
- return new DateWritable(deserializeRead.currentDateWritable);
+ return new DateWritableV2(deserializeRead.currentDateWritable);
case TIMESTAMP:
return new TimestampWritable(deserializeRead.currentTimestampWritable);
case INTERVAL_YEAR_MONTH:
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
index 3fe8b09ffa..878d286281 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
@@ -22,7 +22,7 @@
import java.util.Random;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -86,7 +86,7 @@ public static TimestampColumnVector generateTimestampColumnVector(
tcv.noNulls = !nulls;
tcv.isRepeating = repeating;
- Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand);
+ Timestamp repeatingTimestamp = RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp();
int nullFrequency = generateNullFrequency(rand);
@@ -98,7 +98,7 @@ public static TimestampColumnVector generateTimestampColumnVector(
}else {
tcv.isNull[i] = false;
if (!repeating) {
- Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand);
+ Timestamp randomTimestamp = RandomTypeUtil.getRandTimestamp(rand).toSqlTimestamp();
tcv.set(i, randomTimestamp);
timestampValues[i] = randomTimestamp;
} else {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
index 1064b1986a..9bf9d9d169 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorColumnGroupGenerator.java
@@ -22,7 +22,7 @@
import java.util.Arrays;
import java.util.Random;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -31,8 +31,6 @@
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType;
import org.apache.hadoop.hive.ql.exec.vector.util.batchgen.VectorBatchGenerator.GenerateType.GenerateCategory;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.Text;
public class VectorColumnGroupGenerator {
@@ -232,7 +230,7 @@ private void generateRowColumnValue(int rowIndex, int columnIndex, Random random
case TIMESTAMP:
{
- Timestamp value = RandomTypeUtil.getRandTimestamp(random);
+ Timestamp value = RandomTypeUtil.getRandTimestamp(random).toSqlTimestamp();
((Timestamp[]) array)[rowIndex] = value;
}
break;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
index 74f6624597..cf73bb9004 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/arrow/TestArrowColumnarBatchSerDe.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -56,7 +57,6 @@
import org.junit.Before;
import org.junit.Test;
-import java.sql.Timestamp;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -111,10 +111,10 @@
private final static Timestamp NEGATIVE_TIMESTAMP_WITH_NANOS;
static {
- TIMESTAMP = new Timestamp(TIME_IN_MS);
+ TIMESTAMP = Timestamp.ofEpochMilli(TIME_IN_MS);
TIMESTAMP.setNanos(123456789);
- NEGATIVE_TIMESTAMP_WITHOUT_NANOS = new Timestamp(NEGATIVE_TIME_IN_MS);
- NEGATIVE_TIMESTAMP_WITH_NANOS = new Timestamp(NEGATIVE_TIME_IN_MS);
+ NEGATIVE_TIMESTAMP_WITHOUT_NANOS = Timestamp.ofEpochMilli(NEGATIVE_TIME_IN_MS);
+ NEGATIVE_TIMESTAMP_WITH_NANOS = Timestamp.ofEpochMilli(NEGATIVE_TIME_IN_MS);
NEGATIVE_TIMESTAMP_WITH_NANOS.setNanos(123456789);
}
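
The Arrow serde test now builds its fixture timestamps from epoch millis and attaches nanoseconds afterwards. A quick sketch of that construction, using only ofEpochMilli/setNanos/getNanos/toEpochMilli as they appear in this patch:

```java
import org.apache.hadoop.hive.common.type.Timestamp;

public class NanosTimestampSketch {
  public static void main(String[] args) {
    // Same construction pattern as the static initializer above.
    Timestamp ts = Timestamp.ofEpochMilli(1547000000000L);
    ts.setNanos(123456789);
    System.out.println(ts.toEpochMilli());
    System.out.println(ts.getNanos());  // 123456789
  }
}
```
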
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index ef678a8eb3..24c8a689c8 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -25,11 +25,8 @@
import java.io.File;
import java.io.IOException;
-import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -43,12 +40,14 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -1315,7 +1314,7 @@ public void createOrcDateFile(Path file, int minYear, int maxYear
new TimestampWritable(Timestamp.valueOf(year + "-05-05 12:34:56."
+ ms)));
row.setFieldValue(1,
- new DateWritable(new Date(year - 1900, 11, 25)));
+ new DateWritableV2(Date.of(year - 1900, 11, 25)));
writer.addRow(row);
}
}
@@ -1329,7 +1328,7 @@ public void createOrcDateFile(Path file, int minYear, int maxYear
assertEquals(new TimestampWritable
(Timestamp.valueOf(year + "-05-05 12:34:56." + ms)),
row.getFieldValue(0));
- assertEquals(new DateWritable(new Date(year - 1900, 11, 25)),
+ assertEquals(new DateWritableV2(Date.of(year - 1900, 11, 25)),
row.getFieldValue(1));
}
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
index 0c9c95d534..9618360108 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestVectorizedORCReader.java
@@ -19,9 +19,7 @@
package org.apache.hadoop.hive.ql.io.orc;
import java.io.File;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.Calendar;
+import java.time.LocalDateTime;
import java.util.Random;
import junit.framework.Assert;
@@ -29,7 +27,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -38,7 +38,7 @@
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -133,7 +133,7 @@ public void createFile() throws Exception {
for (int i = 0; i < 21000; ++i) {
if ((i % 7) != 0) {
writer.addRow(new MyRecord(((i % 3) == 0), (byte)(i % 5), i, (long) 200, (short) (300 + i), (double) (400 + i),
- words[r1.nextInt(words.length)], new Timestamp(Calendar.getInstance().getTime().getTime()),
+ words[r1.nextInt(words.length)], new Timestamp(LocalDateTime.now()),
Date.valueOf(dates[i % 3]), HiveDecimal.create(decimalStrings[i % decimalStrings.length])));
} else {
writer.addRow(new MyRecord(null, null, i, (long) 200, null, null, null, null, null, null));
@@ -179,13 +179,13 @@ private void checkVectorizedReader() throws Exception {
TimestampColumnVector tcv = ((TimestampColumnVector) cv);
Assert.assertEquals(t.getTimestamp(), tcv.asScratchTimestamp(rowId));
- } else if (a instanceof DateWritable) {
+ } else if (a instanceof DateWritableV2) {
// Dates are stored as long, so convert and compare
- DateWritable adt = (DateWritable) a;
+ DateWritableV2 adt = (DateWritableV2) a;
long b = ((LongColumnVector) cv).vector[rowId];
- Assert.assertEquals(adt.get().getTime(),
- DateWritable.daysToMillis((int) b));
+ Assert.assertEquals(adt.get().toEpochMilli(),
+ DateWritableV2.daysToMillis((int) b));
} else if (a instanceof HiveDecimalWritable) {
// Decimals are stored as BigInteger, so convert and compare
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
index a2304410b6..1d32afe00c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/VectorizedColumnReaderTestBase.java
@@ -23,6 +23,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
@@ -62,7 +63,6 @@
import org.apache.parquet.schema.MessageType;
import java.io.IOException;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
@@ -209,7 +209,9 @@ protected static boolean getBooleanValue(
}
protected static NanoTime getNanoTime(int index) {
- return NanoTimeUtils.getNanoTime(new Timestamp(index), false);
+ Timestamp ts = new Timestamp();
+ ts.setTimeInMillis(index);
+ return NanoTimeUtils.getNanoTime(ts, false);
}
protected static HiveDecimal getDecimal(
@@ -376,8 +378,13 @@ protected void timestampRead(boolean isDictionaryEncoding) throws InterruptedExc
if (c == nElements) {
break;
}
- Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(c);
- assertEquals("Not the same time at " + c, expected.getTime(), vector.getTime(i));
+ Timestamp expected = new Timestamp();
+ if (isDictionaryEncoding) {
+ expected.setTimeInMillis(c % UNIQUE_NUM);
+ } else {
+ expected.setTimeInMillis(c);
+ }
+ assertEquals("Not the same time at " + c, expected.toEpochMilli(), vector.getTime(i));
assertEquals("Not the same nano at " + c, expected.getNanos(), vector.getNanos(i));
assertFalse(vector.isNull[i]);
c++;
@@ -408,8 +415,12 @@ protected void stringReadTimestamp(boolean isDictionaryEncoding) throws Interrup
break;
}
- Timestamp expected = isDictionaryEncoding ? new Timestamp(c % UNIQUE_NUM) : new Timestamp(
- c);
+ Timestamp expected = new Timestamp();
+ if (isDictionaryEncoding) {
+ expected.setTimeInMillis(c % UNIQUE_NUM);
+ } else {
+ expected.setTimeInMillis(c);
+ }
String actual = new String(Arrays
.copyOfRange(vector.vector[i], vector.start[i], vector.start[i] + vector.length[i]));
assertEquals("Not the same time at " + c, expected.toString(), actual);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
index d14f0a99ac..0d0057e080 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
@@ -13,19 +13,18 @@
*/
package org.apache.hadoop.hive.ql.io.parquet.serde;
-import java.sql.Timestamp;
import java.util.Calendar;
-import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
-import junit.framework.Assert;
-import junit.framework.TestCase;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
/**
@@ -42,7 +41,7 @@ public void testJulianDay() {
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.setTimeZone(TimeZone.getTimeZone("GMT"));
- Timestamp ts = new Timestamp(cal.getTimeInMillis());
+ Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis());
NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
Assert.assertEquals(nt.getJulianDay(), 2440000);
@@ -57,7 +56,7 @@ public void testJulianDay() {
cal1.set(Calendar.HOUR_OF_DAY, 0);
cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
- Timestamp ts1 = new Timestamp(cal1.getTimeInMillis());
+ Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, false);
Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
@@ -70,7 +69,7 @@ public void testJulianDay() {
cal2.set(Calendar.HOUR_OF_DAY, 0);
cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
- Timestamp ts2 = new Timestamp(cal2.getTimeInMillis());
+ Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, false);
Timestamp ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
@@ -86,7 +85,7 @@ public void testJulianDay() {
cal1.set(Calendar.HOUR_OF_DAY, 0);
cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
- ts1 = new Timestamp(cal1.getTimeInMillis());
+ ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
nt1 = NanoTimeUtils.getNanoTime(ts1, false);
ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
@@ -99,7 +98,7 @@ public void testJulianDay() {
cal2.set(Calendar.HOUR_OF_DAY, 0);
cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
- ts2 = new Timestamp(cal2.getTimeInMillis());
+ ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
nt2 = NanoTimeUtils.getNanoTime(ts2, false);
ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
@@ -117,8 +116,7 @@ public void testNanos() {
cal.set(Calendar.MINUTE, 1);
cal.set(Calendar.SECOND, 1);
cal.setTimeZone(TimeZone.getTimeZone("GMT"));
- Timestamp ts = new Timestamp(cal.getTimeInMillis());
- ts.setNanos(1);
+ Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
//(1*60*60 + 1*60 + 1) * 10e9 + 1
NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -133,8 +131,7 @@ public void testNanos() {
cal.set(Calendar.MINUTE, 59);
cal.set(Calendar.SECOND, 59);
cal.setTimeZone(TimeZone.getTimeZone("GMT"));
- ts = new Timestamp(cal.getTimeInMillis());
- ts.setNanos(999999999);
+ ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 999999999);
//(23*60*60 + 59*60 + 59)*10e9 + 999999999
nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -149,8 +146,7 @@ public void testNanos() {
cal2.set(Calendar.MINUTE, 10);
cal2.set(Calendar.SECOND, 0);
cal2.setTimeZone(TimeZone.getTimeZone("GMT"));
- Timestamp ts2 = new Timestamp(cal2.getTimeInMillis());
- ts2.setNanos(10);
+ Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis(), 10);
Calendar cal1 = Calendar.getInstance();
cal1.set(Calendar.YEAR, 1968);
@@ -160,8 +156,7 @@ public void testNanos() {
cal1.set(Calendar.MINUTE, 0);
cal1.set(Calendar.SECOND, 0);
cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
- Timestamp ts1 = new Timestamp(cal1.getTimeInMillis());
- ts1.setNanos(1);
+ Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis(), 1);
NanoTime n2 = NanoTimeUtils.getNanoTime(ts2, false);
NanoTime n1 = NanoTimeUtils.getNanoTime(ts1, false);
@@ -183,8 +178,7 @@ public void testTimezone() {
cal.set(Calendar.MINUTE, 1);
cal.set(Calendar.SECOND, 1);
cal.setTimeZone(TimeZone.getTimeZone("US/Pacific"));
- Timestamp ts = new Timestamp(cal.getTimeInMillis());
- ts.setNanos(1);
+ Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis(), 1);
/**
* 17:00 PDT = 00:00 GMT (daylight-savings)
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
index 406ceceed1..97695c266f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzer.java
@@ -19,11 +19,11 @@
import static org.junit.Assert.*;
-import java.sql.Date;
import java.util.HashMap;
import java.util.Map;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.junit.Test;
public class TestSemanticAnalyzer {
@@ -61,7 +61,7 @@ public void checkNormalization(String colType, String originalColSpec,
BaseSemanticAnalyzer.normalizeColSpec(partSpec, colName, colType, originalColSpec, colValue);
assertEquals(result, partSpec.get(colName));
if (colValue instanceof Date) {
- DateWritable dw = new DateWritable((Date)colValue);
+ DateWritableV2 dw = new DateWritableV2((Date)colValue);
BaseSemanticAnalyzer.normalizeColSpec(partSpec, colName, colType, originalColSpec, dw);
assertEquals(result, partSpec.get(colName));
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
index 9f20ff656b..9c48359d61 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/TestUDFDateFormatGranularity.java
@@ -17,11 +17,10 @@
*/
package org.apache.hadoop.hive.ql.udf;
-import java.sql.Timestamp;
import java.time.Instant;
import java.time.ZoneId;
-import java.time.ZoneOffset;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -37,56 +36,56 @@
public void testTimestampToTimestampWithGranularity() throws Exception {
// Running example
// Friday 30th August 1985 02:47:02 AM
- final TimestampWritable t = new TimestampWritable(new Timestamp(494243222000L));
+ final TimestampWritable t = new TimestampWritable(Timestamp.ofEpochMilli(494243222000L));
UDFDateFloor g;
// Year granularity
// Tuesday 1st January 1985 12:00:00 AM
g = new UDFDateFloorYear();
TimestampWritable i1 = g.evaluate(t);
- assertEquals(473414400000L, i1.getTimestamp().getTime());
+ assertEquals(473414400000L, i1.getTimestamp().toEpochMilli());
// Quarter granularity
// Monday 1st July 1985 12:00:00 AM
g = new UDFDateFloorQuarter();
TimestampWritable i2 = g.evaluate(t);
- assertEquals(489049200000L, i2.getTimestamp().getTime());
+ assertEquals(489049200000L, i2.getTimestamp().toEpochMilli());
// Month granularity
// Thursday 1st August 1985 12:00:00 AM
g = new UDFDateFloorMonth();
TimestampWritable i3 = g.evaluate(t);
- assertEquals(491727600000L, i3.getTimestamp().getTime());
+ assertEquals(491727600000L, i3.getTimestamp().toEpochMilli());
// Week granularity
// Monday 26th August 1985 12:00:00 AM
g = new UDFDateFloorWeek();
TimestampWritable i4 = g.evaluate(t);
- assertEquals(493887600000L, i4.getTimestamp().getTime());
+ assertEquals(493887600000L, i4.getTimestamp().toEpochMilli());
// Day granularity
// Friday 30th August 1985 12:00:00 AM
g = new UDFDateFloorDay();
TimestampWritable i5 = g.evaluate(t);
- assertEquals(494233200000L, i5.getTimestamp().getTime());
+ assertEquals(494233200000L, i5.getTimestamp().toEpochMilli());
// Hour granularity
// Friday 30th August 1985 02:00:00 AM
g = new UDFDateFloorHour();
TimestampWritable i6 = g.evaluate(t);
- assertEquals(494240400000L, i6.getTimestamp().getTime());
+ assertEquals(494240400000L, i6.getTimestamp().toEpochMilli());
// Minute granularity
// Friday 30th August 1985 02:47:00 AM
g = new UDFDateFloorMinute();
TimestampWritable i7 = g.evaluate(t);
- assertEquals(494243220000L, i7.getTimestamp().getTime());
+ assertEquals(494243220000L, i7.getTimestamp().toEpochMilli());
// Second granularity
// Friday 30th August 1985 02:47:02 AM
g = new UDFDateFloorSecond();
TimestampWritable i8 = g.evaluate(t);
- assertEquals(494243222000L, i8.getTimestamp().getTime());
+ assertEquals(494243222000L, i8.getTimestamp().toEpochMilli());
}
@Test
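
The granularity assertions above all follow one pattern: wrap an epoch-milli instant with Timestamp.ofEpochMilli, run the floor UDF, and read the result back through toEpochMilli() instead of java.sql getTime(). A condensed sketch of a single check, assuming UDFDateFloorYear behaves as exercised by the test:

```java
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.udf.UDFDateFloorYear;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public class FloorToEpochMilliSketch {
  public static void main(String[] args) {
    // Friday 30th August 1985 02:47:02 AM, as in the test above.
    TimestampWritable t = new TimestampWritable(Timestamp.ofEpochMilli(494243222000L));
    UDFDateFloorYear floorYear = new UDFDateFloorYear();
    long flooredMillis = floorYear.evaluate(t).getTimestamp().toEpochMilli();
    System.out.println(flooredMillis);  // the test above expects 473414400000L (time-zone dependent)
  }
}
```
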
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
index 0acb46db16..842f77caeb 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDate.java
@@ -18,22 +18,22 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import junit.framework.TestCase;
+
public class TestGenericUDFDate extends TestCase {
public void testStringToDate() throws HiveException {
GenericUDFDate udf = new GenericUDFDate();
@@ -43,13 +43,13 @@ public void testStringToDate() throws HiveException {
udf.initialize(arguments);
DeferredObject valueObj = new DeferredJavaObject(new Text("2009-07-30"));
DeferredObject[] args = {valueObj};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("to_date() test for STRING failed ", "2009-07-30", output.toString());
// Try with null args
DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
- output = (DateWritable) udf.evaluate(nullArgs);
+ output = (DateWritableV2) udf.evaluate(nullArgs);
assertNull("to_date() with null STRING", output);
}
@@ -59,16 +59,16 @@ public void testTimestampToDate() throws HiveException {
ObjectInspector[] arguments = {valueOI};
udf.initialize(arguments);
- DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
- 30, 4, 17, 52, 0)));
+ DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(
+ new Timestamp(LocalDateTime.of(109, 06, 30, 4, 17, 52, 0))));
DeferredObject[] args = {valueObj};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("to_date() test for TIMESTAMP failed ", "2009-07-30", output.toString());
// Try with null args
DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
- output = (DateWritable) udf.evaluate(nullArgs);
+ output = (DateWritableV2) udf.evaluate(nullArgs);
assertNull("to_date() with null TIMESTAMP", output);
}
@@ -78,15 +78,15 @@ public void testDateWritablepToDate() throws HiveException {
ObjectInspector[] arguments = {valueOI};
udf.initialize(arguments);
- DeferredObject valueObj = new DeferredJavaObject(new DateWritable(new Date(109, 06, 30)));
+ DeferredObject valueObj = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 30)));
DeferredObject[] args = {valueObj};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("to_date() test for DATEWRITABLE failed ", "2009-07-30", output.toString());
// Try with null args
DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
- output = (DateWritable) udf.evaluate(nullArgs);
+ output = (DateWritableV2) udf.evaluate(nullArgs);
assertNull("to_date() with null DATE", output);
}
@@ -97,7 +97,7 @@ public void testVoidToDate() throws HiveException {
udf.initialize(arguments);
DeferredObject[] args = { new DeferredJavaObject(null) };
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
// Try with null VOID
assertNull("to_date() with null DATE ", output);
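
Across these UDF tests the conversion is mechanical: inputs are wrapped in the new writable and the evaluate() result is cast to DateWritableV2 instead of DateWritable. A hedged sketch of that harness for to_date() over a DATE input, assembled from calls visible in the hunks above (class name and the choice of object inspector are illustrative):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ToDateSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFDate udf = new GenericUDFDate();
    ObjectInspector[] argOIs = { PrimitiveObjectInspectorFactory.writableDateObjectInspector };
    udf.initialize(argOIs);

    // Wrap the input in the V2 writable; Date.valueOf sidesteps the legacy constructor offsets
    DeferredObject value = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2009-07-30")));
    DateWritableV2 output = (DateWritableV2) udf.evaluate(new DeferredObject[] { value });
    System.out.println(output); // 2009-07-30, matching the assertion in the test above
  }
}
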
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
index 9caf3b77e6..0bbf04adc1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateAdd.java
@@ -18,21 +18,21 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import junit.framework.TestCase;
+
public class TestGenericUDFDateAdd extends TestCase {
public void testStringToDate() throws HiveException {
GenericUDFDateAdd udf = new GenericUDFDateAdd();
@@ -44,7 +44,7 @@ public void testStringToDate() throws HiveException {
DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for STRING failed ", "2009-07-22", output.toString());
@@ -66,11 +66,11 @@ public void testTimestampToDate() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
- 20, 4, 17, 52, 0)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(
+ new Timestamp(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0))));
DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for TIMESTAMP failed ", "2009-07-23", output.toString());
@@ -93,10 +93,10 @@ public void testDateWritablepToDate() throws HiveException {
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for DATEWRITABLE failed ", "2009-07-24", output.toString());
@@ -118,10 +118,10 @@ public void testByteDataTypeAsDays() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for BYTE failed ", "2009-07-24", output.toString());
}
@@ -133,10 +133,10 @@ public void testShortDataTypeAsDays() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for SHORT failed ", "2009-07-24", output.toString());
}
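
One detail worth flagging while reading the converted constructor arguments in the hunks above and below: the deprecated java.sql constructors encode the year as an offset from 1900 and the month as 0-based, while the replacement factories take both literally. A hedged sketch of the unambiguous forms, using only calls that appear elsewhere in this patch (including the Timestamp(LocalDateTime) constructor); the class name is illustrative:

import java.time.LocalDateTime;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;

public class LegacyConstructorNotes {
  public static void main(String[] args) {
    // Deprecated java.sql forms and the calendar values they actually meant:
    //   new java.sql.Date(109, 06, 20)                    -> 2009-07-20 (year - 1900, 0-based month)
    //   new java.sql.Timestamp(109, 06, 20, 4, 17, 52, 0) -> 2009-07-20 04:17:52
    // The string factories and java.time take the year and month literally:
    Date d = Date.valueOf("2009-07-20");
    Timestamp fromString = Timestamp.valueOf("2009-07-20 04:17:52");
    Timestamp fromLocal = new Timestamp(LocalDateTime.of(2009, 7, 20, 4, 17, 52, 0));
    System.out.println(d + " / " + fromString + " / " + fromLocal);
  }
}
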
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
index 3f4ea3f988..71861bd25c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateDiff.java
@@ -18,22 +18,22 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
+import junit.framework.TestCase;
+
public class TestGenericUDFDateDiff extends TestCase {
public void testStringToDate() throws HiveException {
GenericUDFDateDiff udf = new GenericUDFDateDiff();
@@ -67,10 +67,10 @@ public void testTimestampToDate() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
- 20, 0, 0, 0, 0)));
- DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
- 17, 0, 0, 0, 0)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(
+ new Timestamp(LocalDateTime.of(109, 06, 20, 0, 0, 0, 0))));
+ DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(
+ new Timestamp(LocalDateTime.of(109, 06, 17, 0, 0, 0, 0))));
DeferredObject[] args = {valueObj1, valueObj2};
IntWritable output = (IntWritable) udf.evaluate(args);
@@ -95,8 +95,8 @@ public void testDateWritablepToDate() throws HiveException {
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
- DeferredObject valueObj2 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 10)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
+ DeferredObject valueObj2 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 10)));
DeferredObject[] args = {valueObj1, valueObj2};
IntWritable output = (IntWritable) udf.evaluate(args);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
index d29d964bd0..1c0013ddfd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
@@ -17,21 +17,19 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;
-import org.junit.Assume;
public class TestGenericUDFDateFormat extends TestCase {
@@ -165,7 +163,7 @@ private void runAndVerifyStr(String str, Text fmtText, String expResult, Generic
private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf)
throws HiveException {
- DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritable(
+ DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritableV2(
Date.valueOf(str)) : null);
DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
DeferredObject[] args = { valueObj0, valueObj1 };
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
index cb00cfd189..502dfaba48 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateSub.java
@@ -18,21 +18,21 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
+import java.time.LocalDateTime;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import junit.framework.TestCase;
+
public class TestGenericUDFDateSub extends TestCase {
public void testStringToDate() throws HiveException {
GenericUDFDateSub udf = new GenericUDFDateSub();
@@ -44,7 +44,7 @@ public void testStringToDate() throws HiveException {
DeferredObject valueObj1 = new DeferredJavaObject(new Text("2009-07-20 04:17:52"));
DeferredObject valueObj2 = new DeferredJavaObject(new Integer("2"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_sub() test for STRING failed ", "2009-07-18", output.toString());
@@ -66,11 +66,11 @@ public void testTimestampToDate() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06,
- 20, 4, 17, 52, 0)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(
+ new Timestamp(LocalDateTime.of(109, 06, 20, 4, 17, 52, 0))));
DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_sub() test for TIMESTAMP failed ", "2009-07-17", output.toString());
@@ -93,10 +93,10 @@ public void testDateWritablepToDate() throws HiveException {
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_sub() test for DATEWRITABLE failed ", "2009-07-16", output.toString());
@@ -118,10 +118,10 @@ public void testByteDataTypeAsDays() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for BYTE failed ", "2009-07-16", output.toString());
}
@@ -133,10 +133,10 @@ public void testShortDataTypeAsDays() throws HiveException {
ObjectInspector[] arguments = {valueOI1, valueOI2};
udf.initialize(arguments);
- DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritableV2(Date.of(109, 06, 20)));
DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
DeferredObject[] args = {valueObj1, valueObj2};
- DateWritable output = (DateWritable) udf.evaluate(args);
+ DateWritableV2 output = (DateWritableV2) udf.evaluate(args);
assertEquals("date_add() test for SHORT failed ", "2009-07-16", output.toString());
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
index bc8572e3b8..519a6635e1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFFromUtcTimestamp.java
@@ -18,17 +18,13 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import junit.framework.TestCase;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
index 4677aa7409..9787454371 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFGreatest.java
@@ -18,12 +18,13 @@
package org.apache.hadoop.hive.ql.udf.generic;
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -32,8 +33,6 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
-import java.sql.Date;
-
public class TestGenericUDFGreatest extends TestCase {
public void testOneArg() throws HiveException {
@@ -210,7 +209,7 @@ private Object getWritable(Object o) {
} else if (o instanceof Double) {
return o != null ? new DoubleWritable((Double) o) : null;
} else if (o instanceof Date) {
- return o != null ? new DateWritable((Date) o) : null;
+ return o != null ? new DateWritableV2((Date) o) : null;
} else if (o instanceof Byte) {
return o != null ? new ByteWritable((Byte) o): null;
} else if (o instanceof Short) {
@@ -231,8 +230,8 @@ private Object parseOutput(Object o) {
return ((IntWritable) o).get();
} else if (o instanceof DoubleWritable) {
return ((DoubleWritable) o).get();
- } else if (o instanceof DateWritable) {
- return ((DateWritable) o).get();
+ } else if (o instanceof DateWritableV2) {
+ return ((DateWritableV2) o).get();
} else if (o instanceof ByteWritable) {
return ((ByteWritable) o).get();
} else if (o instanceof ShortWritable) {
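
The greatest()/least() tests route values through a pair of helpers that box Java objects into writables and unbox results back again; only the DATE branch changes. A sketch of just that branch, with class and method names of my own choosing:

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DateWritableHelpers {

  // Hive's Date is now wrapped in DateWritableV2 before being handed to the UDF.
  static Object toWritable(Object o) {
    return (o instanceof Date) ? new DateWritableV2((Date) o) : o;
  }

  // DateWritableV2.get() returns org.apache.hadoop.hive.common.type.Date, not
  // java.sql.Date, so expected values must be built with the Hive type as well.
  static Object fromWritable(Object o) {
    return (o instanceof DateWritableV2) ? ((DateWritableV2) o).get() : o;
  }
}
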
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
index 7d7c84da72..41db2032ab 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLastDay.java
@@ -17,10 +17,7 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Timestamp;
-
-import junit.framework.TestCase;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -29,6 +26,8 @@
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
+import junit.framework.TestCase;
+
public class TestGenericUDFLastDay extends TestCase {
public void testLastDay() throws HiveException {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
index f966cb06d3..cccc70e0dc 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFLeast.java
@@ -17,16 +17,15 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -210,7 +209,7 @@ private Object getWritable(Object o) {
} else if (o instanceof Double) {
return o != null ? new DoubleWritable((Double) o) : null;
} else if (o instanceof Date) {
- return o != null ? new DateWritable((Date) o) : null;
+ return o != null ? new DateWritableV2((Date) o) : null;
} else if (o instanceof Byte) {
return o != null ? new ByteWritable((Byte) o): null;
} else if (o instanceof Short) {
@@ -231,8 +230,8 @@ private Object parseOutput(Object o) {
return ((IntWritable) o).get();
} else if (o instanceof DoubleWritable) {
return ((DoubleWritable) o).get();
- } else if (o instanceof DateWritable) {
- return ((DateWritable) o).get();
+ } else if (o instanceof DateWritableV2) {
+ return ((DateWritableV2) o).get();
} else if (o instanceof ByteWritable) {
return ((ByteWritable) o).get();
} else if (o instanceof ShortWritable) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
index 7eee550688..c409e228a2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -269,8 +268,8 @@ protected void runTestTs(String ts1, String ts2, Double expDiff, GenericUDFMonth
protected void runTestDt(String dt1, String dt2, Double expDiff, GenericUDFMonthsBetween udf)
throws HiveException {
- DateWritable dtWr1 = dt1 == null ? null : new DateWritable(Date.valueOf(dt1));
- DateWritable dtWr2 = dt2 == null ? null : new DateWritable(Date.valueOf(dt2));
+ DateWritableV2 dtWr1 = dt1 == null ? null : new DateWritableV2(Date.valueOf(dt1));
+ DateWritableV2 dtWr2 = dt2 == null ? null : new DateWritableV2(Date.valueOf(dt2));
DeferredJavaObject valueObj1 = new DeferredJavaObject(dtWr1);
DeferredJavaObject valueObj2 = new DeferredJavaObject(dtWr2);
DeferredObject[] args = new DeferredObject[] { valueObj1, valueObj2 };
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
index 1402467cab..281b0d5e71 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFNullif.java
@@ -23,7 +23,7 @@
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -109,7 +109,7 @@ public void testNotSupportedArgumentMix() throws HiveException {
ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDateObjectInspector,
PrimitiveObjectInspectorFactory.writableByteObjectInspector };
- DeferredObject[] args = { new DeferredJavaObject(new DateWritable(4)),
+ DeferredObject[] args = { new DeferredJavaObject(new DateWritableV2(4)),
new DeferredJavaObject(new ByteWritable((byte) 4)) };
udf.initialize(inputOIs);
@@ -123,8 +123,8 @@ public void testDateCompareEq() throws HiveException {
PrimitiveObjectInspectorFactory.writableDateObjectInspector,
PrimitiveObjectInspectorFactory.writableDateObjectInspector };
DeferredObject[] args = {
- new DeferredJavaObject(new DateWritable(4)),
- new DeferredJavaObject(new DateWritable(4))
+ new DeferredJavaObject(new DateWritableV2(4)),
+ new DeferredJavaObject(new DateWritableV2(4))
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
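
The NULLIF hunk also exercises DateWritableV2's int constructor. A two-line sketch of what that value represents, assuming the days-since-epoch semantics of the original DateWritable carry over unchanged (the test above only compares two identically constructed writables, so this is an assumption, not something the assertions verify):

import org.apache.hadoop.hive.serde2.io.DateWritableV2;

public class DaysSinceEpochSketch {
  public static void main(String[] args) {
    // new DateWritableV2(4): four days after 1970-01-01, i.e. 1970-01-05 (assumed)
    DateWritableV2 fourDaysIn = new DateWritableV2(4);
    System.out.println(fourDaysIn); // expected: 1970-01-05
  }
}
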
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
index efc951483c..69ba3008ad 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
@@ -18,19 +18,18 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
@@ -282,8 +281,8 @@ public void testIntervalYearMonthMinusIntervalYearMonth() throws Exception {
public void testDateMinusIntervalYearMonth() throws Exception {
GenericUDFOPMinus udf = new GenericUDFOPMinus();
- DateWritable left =
- new DateWritable(Date.valueOf("2004-02-15"));
+ DateWritableV2 left =
+ new DateWritableV2(Date.valueOf("2004-02-15"));
HiveIntervalYearMonthWritable right =
new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
ObjectInspector[] inputOIs = {
@@ -297,7 +296,7 @@ public void testDateMinusIntervalYearMonth() throws Exception {
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
- DateWritable res = (DateWritable) udf.evaluate(args);
+ DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
}
@@ -374,8 +373,8 @@ public void testTimestampMinusIntervalDayTime() throws Exception {
public void testDateMinusIntervalDayTime() throws Exception {
GenericUDFOPMinus udf = new GenericUDFOPMinus();
- DateWritable left =
- new DateWritable(Date.valueOf("2001-01-01"));
+ DateWritableV2 left =
+ new DateWritableV2(Date.valueOf("2001-01-01"));
HiveIntervalDayTimeWritable right =
new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.555"));
ObjectInspector[] inputOIs = {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
index 5350a00e73..4536595316 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
@@ -18,19 +18,18 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
@@ -289,8 +288,8 @@ public void testIntervalYearMonthPlusDate() throws Exception {
HiveIntervalYearMonthWritable left =
new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
- DateWritable right =
- new DateWritable(Date.valueOf("2001-06-15"));
+ DateWritableV2 right =
+ new DateWritableV2(Date.valueOf("2001-06-15"));
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
PrimitiveObjectInspectorFactory.writableDateObjectInspector
@@ -302,7 +301,7 @@ public void testIntervalYearMonthPlusDate() throws Exception {
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
- DateWritable res = (DateWritable) udf.evaluate(args);
+ DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}
@@ -310,8 +309,8 @@ public void testIntervalYearMonthPlusDate() throws Exception {
public void testDatePlusIntervalYearMonth() throws Exception {
GenericUDFOPPlus udf = new GenericUDFOPPlus();
- DateWritable left =
- new DateWritable(Date.valueOf("2001-06-15"));
+ DateWritableV2 left =
+ new DateWritableV2(Date.valueOf("2001-06-15"));
HiveIntervalYearMonthWritable right =
new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
ObjectInspector[] inputOIs = {
@@ -325,7 +324,7 @@ public void testDatePlusIntervalYearMonth() throws Exception {
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
- DateWritable res = (DateWritable) udf.evaluate(args);
+ DateWritableV2 res = (DateWritableV2) udf.evaluate(args);
Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}
@@ -450,8 +449,8 @@ public void testIntervalDayTimePlusDate() throws Exception {
HiveIntervalDayTimeWritable left =
new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
- DateWritable right =
- new DateWritable(Date.valueOf("2001-01-01"));
+ DateWritableV2 right =
+ new DateWritableV2(Date.valueOf("2001-01-01"));
ObjectInspector[] inputOIs = {
PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
PrimitiveObjectInspectorFactory.writableDateObjectInspector
@@ -472,8 +471,8 @@ public void testIntervalDayTimePlusDate() throws Exception {
public void testDatePlusIntervalDayTime() throws Exception {
GenericUDFOPPlus udf = new GenericUDFOPPlus();
- DateWritable left =
- new DateWritable(Date.valueOf("2001-01-01"));
+ DateWritableV2 left =
+ new DateWritableV2(Date.valueOf("2001-01-01"));
HiveIntervalDayTimeWritable right =
new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
ObjectInspector[] inputOIs = {
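
Date plus/minus interval arithmetic keeps DATE as its result type; only the writable changes from DateWritable to DateWritableV2. Below, the fragments of the testDatePlusIntervalYearMonth hunk are assembled into one sequential sketch (class name illustrative; any SessionState/HiveConf setup the surrounding test class performs is omitted):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DatePlusIntervalSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFOPPlus udf = new GenericUDFOPPlus();
    DateWritableV2 left = new DateWritableV2(Date.valueOf("2001-06-15"));
    HiveIntervalYearMonthWritable right =
        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));

    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // The return type is still DATE ...
    System.out.println(oi.getTypeInfo().equals(TypeInfoFactory.dateTypeInfo)); // true

    DeferredObject[] evalArgs = {
        new DeferredJavaObject(left), new DeferredJavaObject(right) };
    // ... and the result now arrives as DateWritableV2, holding 2004-02-15
    DateWritableV2 res = (DateWritableV2) udf.evaluate(evalArgs);
    System.out.println(res);
  }
}
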
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
index 4b09aa15d5..8b4bfed297 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFQuarter.java
@@ -17,15 +17,14 @@
*/
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -166,7 +165,7 @@ private void runAndVerifyStr(String str, Integer expResult, GenericUDF udf) thro
}
private void runAndVerifyDt(String str, Integer expResult, GenericUDF udf) throws HiveException {
- DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritable(
+ DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritableV2(
Date.valueOf(str)) : null);
DeferredObject[] args = { valueObj0 };
IntWritable output = (IntWritable) udf.evaluate(args);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
index fcdb49cc27..5b6e2ab958 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFSortArray.java
@@ -18,14 +18,14 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Arrays.asList;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -93,22 +93,22 @@ public void testSortStruct() throws HiveException {
udf.initialize(inputOIs);
Object i1 = asList(new Text("a"), new DoubleWritable(3.1415),
- new DateWritable(new Date(2015, 5, 26)),
+ new DateWritableV2(Date.of(2015, 5, 26)),
asList(new IntWritable(1), new IntWritable(3),
new IntWritable(2), new IntWritable(4)));
Object i2 = asList(new Text("b"), new DoubleWritable(3.14),
- new DateWritable(new Date(2015, 5, 26)),
+ new DateWritableV2(Date.of(2015, 5, 26)),
asList(new IntWritable(1), new IntWritable(3),
new IntWritable(2), new IntWritable(4)));
Object i3 = asList(new Text("a"), new DoubleWritable(3.1415),
- new DateWritable(new Date(2015, 5, 25)),
+ new DateWritableV2(Date.of(2015, 5, 25)),
asList(new IntWritable(1), new IntWritable(3),
new IntWritable(2), new IntWritable(5)));
Object i4 = asList(new Text("a"), new DoubleWritable(3.1415),
- new DateWritable(new Date(2015, 5, 25)),
+ new DateWritableV2(Date.of(2015, 5, 25)),
asList(new IntWritable(1), new IntWritable(3),
new IntWritable(2), new IntWritable(4)));
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
index d840238ce5..ad2ffaa5a1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestamp.java
@@ -18,13 +18,12 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -66,12 +65,12 @@ public void testTimestamp() throws HiveException {
Timestamp ts = Timestamp.valueOf("1970-01-01 00:00:00");
runAndVerify(udf,
new TimestampWritable(ts),
- new LongWritable(ts.getTime() / 1000));
+ new LongWritable(ts.toEpochSecond()));
ts = Timestamp.valueOf("2001-02-03 01:02:03");
runAndVerify(udf,
new TimestampWritable(ts),
- new LongWritable(ts.getTime() / 1000));
+ new LongWritable(ts.toEpochSecond()));
// test null values
runAndVerify(udf, null, null);
@@ -85,8 +84,8 @@ public void testDate() throws HiveException {
Date date = Date.valueOf("1970-01-01");
runAndVerify(udf,
- new DateWritable(date),
- new LongWritable(date.getTime() / 1000));
+ new DateWritableV2(date),
+ new LongWritable(date.toEpochSecond()));
// test null values
runAndVerify(udf, null, null);
@@ -101,7 +100,7 @@ public void testString() throws HiveException {
String val = "2001-01-01 01:02:03";
runAndVerify(udf1,
new Text(val),
- new LongWritable(Timestamp.valueOf(val).getTime() / 1000));
+ new LongWritable(Timestamp.valueOf(val).toEpochSecond()));
// test null values
runAndVerify(udf1, null, null);
@@ -116,7 +115,7 @@ public void testString() throws HiveException {
runAndVerify(udf2,
new Text(val),
new Text(format),
- new LongWritable(Date.valueOf(val).getTime() / 1000));
+ new LongWritable(Date.valueOf(val).toEpochSecond()));
// test null values
runAndVerify(udf2, null, null, null);
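
unix_timestamp() works in epoch seconds, so the old getTime() / 1000 arithmetic collapses into the single toEpochSecond() accessor these hunks introduce. A minimal sketch of the mapping (class name illustrative):

import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;

public class EpochSecondsSketch {
  public static void main(String[] args) {
    // Formerly: java.sql.Timestamp.valueOf(val).getTime() / 1000
    long tsSeconds = Timestamp.valueOf("2001-02-03 01:02:03").toEpochSecond();
    // Formerly: java.sql.Date.valueOf(val).getTime() / 1000
    long dateSeconds = Date.valueOf("1970-01-01").toEpochSecond(); // 0 at the epoch
    System.out.println(tsSeconds + " " + dateSeconds);
  }
}
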
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
index 0d524d31f3..f10674b863 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFTrunc.java
@@ -18,13 +18,12 @@
package org.apache.hadoop.hive.ql.udf.generic;
-import java.sql.Date;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -616,35 +615,35 @@ public void testDateWritableToDateWithMonthFormat() throws HiveException {
DeferredObject[] evalArgs;
// test date string
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-01")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-14")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-31")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-02")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-02-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-28")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-02-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-03")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-02-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-28")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-02-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-29")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-02-01", udf, initArgs, evalArgs);
}
@@ -661,47 +660,47 @@ public void testDateWritableToDateWithQuarterFormat() throws HiveException {
DeferredObject[] evalArgs;
// test date string
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-01")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-14")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-31")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-02")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-28")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-03")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-28")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-29")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-05-11")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-05-11")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-04-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-07-01")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-07-01")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-07-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-12-31")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-12-31")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-10-01", udf, initArgs, evalArgs);
}
@@ -718,35 +717,35 @@ public void testDateWritableToDateWithYearFormat() throws HiveException {
DeferredObject[] evalArgs;
// test date string
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-01")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-01")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-14")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-14")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-01-31")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-01-31")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-02")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-02")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2014-02-28")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2014-02-28")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2014-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-03")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-03")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-28")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-28")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-01-01", udf, initArgs, evalArgs);
- valueObj0 = new DeferredJavaObject(new DateWritable(Date.valueOf("2016-02-29")));
+ valueObj0 = new DeferredJavaObject(new DateWritableV2(Date.valueOf("2016-02-29")));
evalArgs = new DeferredObject[] { valueObj0, valueObjFmt };
runAndVerify("2016-01-01", udf, initArgs, evalArgs);
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
index 85bb9b3015..4005fe42ef 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hive.ql.util;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.TimeZone;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.ql.util.DateTimeMath;
import org.junit.*;
diff --git a/ql/src/test/queries/clientpositive/date_udf.q b/ql/src/test/queries/clientpositive/date_udf.q
index cddfc1abb4..aa33b7a524 100644
--- a/ql/src/test/queries/clientpositive/date_udf.q
+++ b/ql/src/test/queries/clientpositive/date_udf.q
@@ -21,7 +21,7 @@ create table date_udf_flight (
LOAD DATA LOCAL INPATH '../../data/files/flights_tiny.txt.1' OVERWRITE INTO TABLE date_udf_flight;
-- Test UDFs with date input
-select unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
+select unix_timestamp(cast(d as timestamp with local time zone)), unix_timestamp(d), year(d), month(d), day(d), dayofmonth(d),
weekofyear(d), to_date(d)
from date_udf;
diff --git a/ql/src/test/queries/clientpositive/druid_timestamptz2.q b/ql/src/test/queries/clientpositive/druid_timestamptz2.q
new file mode 100644
index 0000000000..8f573c8c9b
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/druid_timestamptz2.q
@@ -0,0 +1,60 @@
+CREATE database druid_test_dst;
+use druid_test_dst;
+
+
+
+create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double);
+insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4);
+insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1);
+insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4);
+insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1);
+insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2);
+insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2);
+
+CREATE TABLE druid_test_table_1
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
+FROM druid_test_dst.test_base_table;
+
+select * FROM druid_test_table_1;
+
+CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY");
+
+
+insert into druid_test_table_2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4);
+insert into druid_test_table_2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1);
+insert into druid_test_table_2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4);
+insert into druid_test_table_2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1);
+insert into druid_test_table_2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2);
+insert into druid_test_table_2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2);
+
+select * FROM druid_test_table_2;
+
+SET TIME ZONE UTC;
+
+CREATE TABLE druid_test_table_utc
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
+FROM druid_test_dst.test_base_table;
+
+select * FROM druid_test_table_utc;
+
+CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY");
+
+
+insert into druid_test_table_utc2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4);
+insert into druid_test_table_utc2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1);
+insert into druid_test_table_utc2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4);
+insert into druid_test_table_utc2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1);
+insert into druid_test_table_utc2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2);
+insert into druid_test_table_utc2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2);
+
+select * FROM druid_test_table_utc2;
diff --git a/ql/src/test/queries/clientpositive/localtimezone.q b/ql/src/test/queries/clientpositive/localtimezone.q
index 27b036bab3..7456972aa2 100644
--- a/ql/src/test/queries/clientpositive/localtimezone.q
+++ b/ql/src/test/queries/clientpositive/localtimezone.q
@@ -5,35 +5,35 @@ drop table `timestamptz_test`;
create table `date_test` (`mydate1` date);
insert into `date_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz');
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone));
create table `timestamp_test` (`mydate1` timestamp);
insert into `timestamp_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz');
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone));
create table `timestamptz_test` (`mydate1` timestamp with local time zone);
insert into `timestamptz_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz');
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone));
select * from `date_test`;
select * from `timestamp_test`;
diff --git a/ql/src/test/queries/clientpositive/localtimezone2.q b/ql/src/test/queries/clientpositive/localtimezone2.q
new file mode 100644
index 0000000000..911fca0d0c
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/localtimezone2.q
@@ -0,0 +1,55 @@
+drop table `table_tsltz`;
+
+CREATE TABLE table_tsltz (tz VARCHAR(200),
+ c_ts1 TIMESTAMP,
+ c_ts2 TIMESTAMP,
+ c_tsltz1 TIMESTAMP WITH LOCAL TIME ZONE,
+ c_tsltz2 TIMESTAMP WITH LOCAL TIME ZONE);
+
+set time zone GMT-08:00;
+
+insert into table_tsltz values (
+ '-08:00',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
+
+set time zone UTC;
+
+insert into table_tsltz values (
+ 'UTC',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
+
+set time zone GMT+02:00;
+
+insert into table_tsltz values (
+ '+02:00',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
+
+set time zone US/Pacific;
+
+insert into table_tsltz values (
+ 'US/Pacific',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone));
+
+select tz,
+ c_ts1, c_ts2,
+ cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
+from table_tsltz;
+
+set time zone UTC;
+
+select tz,
+ c_ts1, c_ts2,
+ cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
+from table_tsltz;
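localtimezone2.q contrasts the two types under changing session zones: a plain TIMESTAMP holds a fixed wall-clock value once written, while a TIMESTAMP WITH LOCAL TIME ZONE is normalized to an instant and re-rendered in the zone that is current at read time, so the two final selects (before and after SET TIME ZONE UTC) should show the c_ts columns unchanged and only the c_tsltz columns shifting. A small sketch of the expected conversion; the rendered value in the comment is an assumption based on the -05:00 offset in the literal:

    set time zone GMT-08:00;
    select cast(cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone) as varchar(200));
    -- the literal denotes 2016-01-01 05:00:00 UTC, so a GMT-08:00 session would
    -- be expected to render it as roughly 2015-12-31 21:00:00.0 GMT-08:00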
diff --git a/ql/src/test/queries/clientpositive/parquet_ppd_char.q b/ql/src/test/queries/clientpositive/parquet_ppd_char.q
index 386fb2589f..4230d8c1dd 100644
--- a/ql/src/test/queries/clientpositive/parquet_ppd_char.q
+++ b/ql/src/test/queries/clientpositive/parquet_ppd_char.q
@@ -1,6 +1,7 @@
--! qt:dataset:src1
--! qt:dataset:src
+set hive.parquet.timestamp.skip.conversion=true;
set hive.vectorized.execution.enabled=false;
SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
SET hive.optimize.ppd=true;
diff --git a/ql/src/test/queries/clientpositive/parquet_vectorization_13.q b/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
index e5f48c8c65..0b23f505f4 100644
--- a/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
+++ b/ql/src/test/queries/clientpositive/parquet_vectorization_13.q
@@ -32,8 +32,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -64,8 +64,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -98,8 +98,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -130,8 +130,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
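The new comparison constants in these vectorization tests are the old ones shifted down by 28800 seconds (8 hours): the alltypesparquet/alltypesorc timestamps carry wall-clock values around 1969-12-31 16:00 that the q-tests used to interpret in US/Pacific, so once the same values are read as UTC they sit 8*3600 seconds earlier on the epoch scale and every literal moves with them. The arithmetic, as a rough check:

    8 hours = 8 * 3600 = 28800 seconds
    11      - 28800 = -28789
    -1.388  - 28800 = -28801.388
    7.685   - 28800 = -28792.315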
diff --git a/ql/src/test/queries/clientpositive/parquet_vectorization_7.q b/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
index d0a73a3618..55f21af012 100644
--- a/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
+++ b/ql/src/test/queries/clientpositive/parquet_vectorization_7.q
@@ -24,11 +24,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
@@ -50,11 +50,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
@@ -79,11 +79,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
@@ -105,11 +105,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
diff --git a/ql/src/test/queries/clientpositive/singletsinsertorc.q b/ql/src/test/queries/clientpositive/singletsinsertorc.q
new file mode 100644
index 0000000000..3e4c43eb1f
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/singletsinsertorc.q
@@ -0,0 +1,6 @@
+CREATE TABLE myorctable(ts timestamp)
+STORED AS ORC;
+
+INSERT INTO myorctable VALUES ('1970-01-01 00:00:00');
+
+SELECT * FROM myorctable;
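singletsinsertorc.q looks like a minimal round-trip check for the new semantics: the inserted value is exactly the epoch, which is where a session-zone conversion in the ORC writer or reader would be most visible, and the expected output in the matching .q.out is the literal 1970-01-01 00:00:00 unchanged.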
diff --git a/ql/src/test/queries/clientpositive/timestamp_comparison2.q b/ql/src/test/queries/clientpositive/timestamp_comparison2.q
index affc36fee8..dfb4fd7ec2 100644
--- a/ql/src/test/queries/clientpositive/timestamp_comparison2.q
+++ b/ql/src/test/queries/clientpositive/timestamp_comparison2.q
@@ -17,8 +17,8 @@ FROM alltypesorc
WHERE
((ctinyint != 0)
AND
- (((ctimestamp1 <= timestamp('1969-12-31 16:00:00'))
+ (((ctimestamp1 <= timestamp('1970-01-01 00:00:00'))
OR ((ctinyint = cint) OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > timestamp('1969-12-31 07:55:29')) AND (3569 >= cdouble)))))
+ OR ((ctimestamp2 > timestamp('1969-12-31 15:55:29')) AND (3569 >= cdouble)))))
;
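Both literal rewrites preserve the instant being compared: under the q-tests' US/Pacific zone the old strings named the same points in time that the new strings name in UTC, so the predicate keeps selecting the same rows while the timestamp itself becomes zone-agnostic.

    1969-12-31 16:00:00 -08:00  =  1970-01-01 00:00:00 UTC
    1969-12-31 07:55:29 -08:00  =  1969-12-31 15:55:29 UTC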
diff --git a/ql/src/test/queries/clientpositive/timestamp_dst.q b/ql/src/test/queries/clientpositive/timestamp_dst.q
new file mode 100644
index 0000000000..4dda5a9d28
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/timestamp_dst.q
@@ -0,0 +1,2 @@
+select TIMESTAMP '2015-03-08 02:10:00.101';
+
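timestamp_dst.q pins down behaviour inside a daylight-saving gap: on 2015-03-08 US/Pacific clocks jump from 02:00 straight to 03:00, so 02:10:00.101 does not exist as a local time there. The zone-agnostic Timestamp is expected to echo the constant exactly as written, where the old java.sql.Timestamp-backed type would tend to normalize it forward into the 03:00 hour.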
diff --git a/ql/src/test/queries/clientpositive/udf_reflect2.q b/ql/src/test/queries/clientpositive/udf_reflect2.q
index 7ebe91436a..7810746762 100644
--- a/ql/src/test/queries/clientpositive/udf_reflect2.q
+++ b/ql/src/test/queries/clientpositive/udf_reflect2.q
@@ -36,7 +36,7 @@ SELECT key,
reflect2(ts, "getHours"),
reflect2(ts, "getMinutes"),
reflect2(ts, "getSeconds"),
- reflect2(ts, "getTime")
+ reflect2(ts, "toEpochMilli")
FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5;
@@ -70,5 +70,5 @@ SELECT key,
reflect2(ts, "getHours"),
reflect2(ts, "getMinutes"),
reflect2(ts, "getSeconds"),
- reflect2(ts, "getTime")
+ reflect2(ts, "toEpochMilli")
FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5;
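reflect2 invokes a Java method by name on the column's backing object, so the method name has to track the Java class behind TIMESTAMP: the epoch-milliseconds accessor on Hive's own Timestamp type is toEpochMilli(), where java.sql.Timestamp exposed getTime(). A standalone call mirroring the test:

    SELECT reflect2(cast('2013-02-15 19:41:20' as timestamp), "toEpochMilli");
    -- returns the value as milliseconds since the epoch, with the literal now read as UTC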
diff --git a/ql/src/test/queries/clientpositive/vectorization_13.q b/ql/src/test/queries/clientpositive/vectorization_13.q
index dd7981db03..b9e3fa20c0 100644
--- a/ql/src/test/queries/clientpositive/vectorization_13.q
+++ b/ql/src/test/queries/clientpositive/vectorization_13.q
@@ -32,8 +32,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -64,8 +64,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -98,8 +98,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -130,8 +130,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
diff --git a/ql/src/test/queries/clientpositive/vectorization_7.q b/ql/src/test/queries/clientpositive/vectorization_7.q
index ac0cc1009e..855e2cf34e 100644
--- a/ql/src/test/queries/clientpositive/vectorization_7.q
+++ b/ql/src/test/queries/clientpositive/vectorization_7.q
@@ -24,11 +24,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
@@ -50,11 +50,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
@@ -79,11 +79,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
@@ -105,11 +105,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25;
diff --git a/ql/src/test/queries/clientpositive/vectorization_decimal_date.q b/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
index c38ef09af4..68e5576e46 100644
--- a/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
+++ b/ql/src/test/queries/clientpositive/vectorization_decimal_date.q
@@ -4,5 +4,6 @@ set hive.fetch.task.conversion=none;
CREATE TABLE date_decimal_test STORED AS ORC AS SELECT cint, cdouble, CAST (CAST (cint AS TIMESTAMP) AS DATE) AS cdate, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal FROM alltypesorc;
SET hive.vectorized.execution.enabled=true;
-EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
-SELECT cdate, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
+EXPLAIN VECTORIZATION EXPRESSION SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
+-- 528534767 is 'Wednesday, January 7, 1970 2:48:54 AM'
+SELECT cdate, cint, cdecimal from date_decimal_test where cint IS NOT NULL AND cdouble IS NOT NULL LIMIT 10;
diff --git a/ql/src/test/results/clientpositive/constprog_type.q.out b/ql/src/test/results/clientpositive/constprog_type.q.out
index 99a474662f..a3f63580a5 100644
--- a/ql/src/test/results/clientpositive/constprog_type.q.out
+++ b/ql/src/test/results/clientpositive/constprog_type.q.out
@@ -35,7 +35,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: DATE'2013-11-17' (type: date), TIMESTAMP'2011-04-29 20:46:56.4485' (type: timestamp)
+ expressions: DATE'2013-11-17' (type: date), TIMESTAMP'2011-04-30 03:46:56.4485' (type: timestamp)
outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 48000 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -123,7 +123,7 @@ POSTHOOK: query: SELECT * FROM dest1_n26
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1_n26
#### A masked pattern was here ####
-2013-11-17 2011-04-29 20:46:56.4485
+2013-11-17 2011-04-30 03:46:56.4485
PREHOOK: query: SELECT key, value FROM src WHERE key = cast(86 as double)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
diff --git a/ql/src/test/results/clientpositive/decimal_1.q.out b/ql/src/test/results/clientpositive/decimal_1.q.out
index f5c92f3d2c..37eded2851 100644
--- a/ql/src/test/results/clientpositive/decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/decimal_1.q.out
@@ -120,8 +120,8 @@ POSTHOOK: query: select cast(t as timestamp) from decimal_1_n0
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1_n0
#### A masked pattern was here ####
-1969-12-31 16:00:17.29
+1970-01-01 00:00:17.29
PREHOOK: query: drop table decimal_1_n0
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@decimal_1_n0
PREHOOK: Output: default@decimal_1_n0
diff --git a/ql/src/test/results/clientpositive/decimal_2.q.out b/ql/src/test/results/clientpositive/decimal_2.q.out
index 56e08d735d..ba37cf283b 100644
--- a/ql/src/test/results/clientpositive/decimal_2.q.out
+++ b/ql/src/test/results/clientpositive/decimal_2.q.out
@@ -210,8 +210,8 @@ POSTHOOK: query: select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_2_n1
#### A masked pattern was here ####
-1355944339.1234567
+1355915539.1234567
PREHOOK: query: select cast(true as decimal) from decimal_2_n1
PREHOOK: type: QUERY
PREHOOK: Input: default@decimal_2_n1
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
new file mode 100644
index 0000000000..c71a435fee
--- /dev/null
+++ b/ql/src/test/results/clientpositive/druid/druid_timestamptz2.q.out
@@ -0,0 +1,308 @@
+PREHOOK: query: CREATE database druid_test_dst
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:druid_test_dst
+POSTHOOK: query: CREATE database druid_test_dst
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:druid_test_dst
+PREHOOK: query: use druid_test_dst
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:druid_test_dst
+POSTHOOK: query: use druid_test_dst
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:druid_test_dst
+PREHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: create table test_base_table(`timecolumn` timestamp, `interval_marker` string, `num_l` double)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@test_base_table
+PREHOOK: query: insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.interval_marker SCRIPT []
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+PREHOOK: query: CREATE TABLE druid_test_table_1
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
+FROM druid_test_dst.test_base_table
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: druid_test_dst@test_base_table
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@druid_test_table_1
+POSTHOOK: query: CREATE TABLE druid_test_table_1
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
+FROM druid_test_dst.test_base_table
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: druid_test_dst@test_base_table
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@druid_test_table_1
+POSTHOOK: Lineage: druid_test_table_1.__time EXPRESSION [(test_base_table)test_base_table.FieldSchema(name:timecolumn, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: druid_test_table_1.interval_marker SIMPLE [(test_base_table)test_base_table.FieldSchema(name:interval_marker, type:string, comment:null), ]
+POSTHOOK: Lineage: druid_test_table_1.num_l SIMPLE [(test_base_table)test_base_table.FieldSchema(name:num_l, type:double, comment:null), ]
+PREHOOK: query: select * FROM druid_test_table_1
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table_1
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * FROM druid_test_table_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table_1
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 US/Pacific i1-start 4.0
+2015-03-08 23:59:59.0 US/Pacific i1-end 1.0
+2015-03-09 00:00:00.0 US/Pacific i2-start 4.0
+2015-03-09 23:59:59.0 US/Pacific i2-end 1.0
+2015-03-10 00:00:00.0 US/Pacific i3-start 2.0
+2015-03-10 23:59:59.0 US/Pacific i3-end 2.0
+PREHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: CREATE TABLE druid_test_table_2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_2
+POSTHOOK: query: insert into druid_test_table_2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_2
+PREHOOK: query: select * FROM druid_test_table_2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table_2
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * FROM druid_test_table_2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table_2
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 US/Pacific i1-start 4.0
+2015-03-08 23:59:59.0 US/Pacific i1-end 1.0
+2015-03-09 00:00:00.0 US/Pacific i2-start 4.0
+2015-03-09 23:59:59.0 US/Pacific i2-end 1.0
+2015-03-10 00:00:00.0 US/Pacific i3-start 2.0
+2015-03-10 23:59:59.0 US/Pacific i3-end 2.0
+PREHOOK: query: CREATE TABLE druid_test_table_utc
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
+FROM druid_test_dst.test_base_table
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: druid_test_dst@test_base_table
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@druid_test_table_utc
+POSTHOOK: query: CREATE TABLE druid_test_table_utc
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `interval_marker`, `num_l`
+FROM druid_test_dst.test_base_table
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: druid_test_dst@test_base_table
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc
+POSTHOOK: Lineage: druid_test_table_utc.__time EXPRESSION [(test_base_table)test_base_table.FieldSchema(name:timecolumn, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: druid_test_table_utc.interval_marker SIMPLE [(test_base_table)test_base_table.FieldSchema(name:interval_marker, type:string, comment:null), ]
+POSTHOOK: Lineage: druid_test_table_utc.num_l SIMPLE [(test_base_table)test_base_table.FieldSchema(name:num_l, type:double, comment:null), ]
+PREHOOK: query: select * FROM druid_test_table_utc
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table_utc
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * FROM druid_test_table_utc
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table_utc
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 UTC i1-start 4.0
+2015-03-08 23:59:59.0 UTC i1-end 1.0
+2015-03-09 00:00:00.0 UTC i2-start 4.0
+2015-03-09 23:59:59.0 UTC i2-end 1.0
+2015-03-10 00:00:00.0 UTC i3-start 2.0
+2015-03-10 23:59:59.0 UTC i3-end 2.0
+PREHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: CREATE TABLE druid_test_table_utc2 (`__time` timestamp with local time zone, `interval_marker` string, `num_l` double)
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 00:00:00' as timestamp with local time zone), 'i1-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-08 23:59:59' as timestamp with local time zone), 'i1-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 00:00:00' as timestamp with local time zone), 'i2-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-09 23:59:59' as timestamp with local time zone), 'i2-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 00:00:00' as timestamp with local time zone), 'i3-start', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@druid_test_table_utc2
+POSTHOOK: query: insert into druid_test_table_utc2 values (cast('2015-03-10 23:59:59' as timestamp with local time zone), 'i3-end', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@druid_test_table_utc2
+PREHOOK: query: select * FROM druid_test_table_utc2
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table_utc2
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * FROM druid_test_table_utc2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table_utc2
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 UTC i1-start 4.0
+2015-03-08 23:59:59.0 UTC i1-end 1.0
+2015-03-09 00:00:00.0 UTC i2-start 4.0
+2015-03-09 23:59:59.0 UTC i2-end 1.0
+2015-03-10 00:00:00.0 UTC i3-start 2.0
+2015-03-10 23:59:59.0 UTC i3-end 2.0
diff --git a/ql/src/test/results/clientpositive/foldts.q.out b/ql/src/test/results/clientpositive/foldts.q.out
index 2a82b9a113..83a6481601 100644
--- a/ql/src/test/results/clientpositive/foldts.q.out
+++ b/ql/src/test/results/clientpositive/foldts.q.out
@@ -45,7 +45,7 @@ POSTHOOK: query: select ctimestamp1, unix_timestamp(ctimestamp1), to_unix_timest
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-1969-12-31 15:59:46.674 -13 -13
+1969-12-31 15:59:46.674 -28813 -28813
PREHOOK: query: create temporary table src1orc stored as orc as select * from src1
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@src1
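The foldts change is the same 8-hour shift seen in the vectorization constants: 1969-12-31 15:59:46.674 read in US/Pacific is about 13 seconds before the epoch (hence the old -13), while read as UTC it is 28813 seconds and a fraction before it (hence -28813).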
diff --git a/ql/src/test/results/clientpositive/interval_arithmetic.q.out b/ql/src/test/results/clientpositive/interval_arithmetic.q.out
index 7cb7270c29..819f1fee77 100644
--- a/ql/src/test/results/clientpositive/interval_arithmetic.q.out
+++ b/ql/src/test/results/clientpositive/interval_arithmetic.q.out
@@ -174,7 +174,7 @@ limit 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@interval_arithmetic_1_n0
#### A masked pattern was here ####
-1969-12-31 -10749 23:00:00.000000000 10749 23:00:00.000000000 0 00:00:00.000000000
+1969-12-31 -10750 00:00:00.000000000 10750 00:00:00.000000000 0 00:00:00.000000000
NULL NULL NULL NULL
PREHOOK: query: explain
select
@@ -396,7 +396,7 @@ limit 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@interval_arithmetic_1_n0
#### A masked pattern was here ####
-1969-12-31 1969-09-22 13:37:26.876543211 1970-04-09 11:22:33.123456789 1970-04-09 11:22:33.123456789 1969-09-22 13:37:26.876543211 1969-09-22 13:37:26.876543211 1970-04-09 11:22:33.123456789
+1969-12-31 1969-09-22 12:37:26.876543211 1970-04-09 11:22:33.123456789 1970-04-09 11:22:33.123456789 1969-09-22 12:37:26.876543211 1969-09-22 12:37:26.876543211 1970-04-09 11:22:33.123456789
NULL NULL NULL NULL NULL NULL NULL
PREHOOK: query: explain
select
@@ -558,7 +558,7 @@ limit 2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@interval_arithmetic_1_n0
#### A masked pattern was here ####
-1969-12-31 15:59:46.674 1969-09-23 05:37:13.550543211 1970-04-10 03:22:19.797456789 1970-04-10 03:22:19.797456789 1969-09-23 05:37:13.550543211 1969-09-23 05:37:13.550543211 1970-04-10 03:22:19.797456789
+1969-12-31 15:59:46.674 1969-09-23 04:37:13.550543211 1970-04-10 03:22:19.797456789 1970-04-10 03:22:19.797456789 1969-09-23 04:37:13.550543211 1969-09-23 04:37:13.550543211 1970-04-10 03:22:19.797456789
NULL NULL NULL NULL NULL NULL NULL
PREHOOK: query: explain
select
@@ -630,7 +630,7 @@ STAGE PLANS:
alias: interval_arithmetic_1_n0
Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: TIMESTAMP'2016-11-11 03:04:00.0' (type: timestamp)
+ expressions: TIMESTAMP'2016-11-11 03:04:00' (type: timestamp)
outputColumnNames: _col0
Statistics: Num rows: 12288 Data size: 491520 Basic stats: COMPLETE Column stats: COMPLETE
Limit
diff --git a/ql/src/test/results/clientpositive/llap/results_cache_2.q.out b/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
index a1b24855aa..dc140bf256 100644
--- a/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/results_cache_2.q.out
@@ -103,7 +103,7 @@ group by c1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
#### A masked pattern was here ####
-2012-01-01 01:02:03 10
+2012-01-01 09:02:03 10
test.comment=Queries using non-deterministic functions should not use results cache
PREHOOK: query: explain
select c1, count(*)
@@ -139,7 +139,7 @@ STAGE PLANS:
Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
aggregations: count()
- keys: TIMESTAMP'2012-01-01 01:02:03.0' (type: timestamp)
+ keys: TIMESTAMP'2012-01-01 09:02:03.0' (type: timestamp)
mode: hash
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
@@ -161,7 +161,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: TIMESTAMP'2012-01-01 01:02:03.0' (type: timestamp), _col1 (type: bigint)
+ expressions: TIMESTAMP'2012-01-01 09:02:03.0' (type: timestamp), _col1 (type: bigint)
outputColumnNames: _col0, _col1
Statistics: Num rows: 1 Data size: 48 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git a/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out b/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out
new file mode 100644
index 0000000000..0322eca245
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/singletsinsertorc.q.out
@@ -0,0 +1,28 @@
+PREHOOK: query: CREATE TABLE myorctable(ts timestamp)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@myorctable
+POSTHOOK: query: CREATE TABLE myorctable(ts timestamp)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@myorctable
+PREHOOK: query: INSERT INTO myorctable VALUES ('1970-01-01 00:00:00')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@myorctable
+POSTHOOK: query: INSERT INTO myorctable VALUES ('1970-01-01 00:00:00')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@myorctable
+POSTHOOK: Lineage: myorctable.ts SCRIPT []
+PREHOOK: query: SELECT * FROM myorctable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@myorctable
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM myorctable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@myorctable
+#### A masked pattern was here ####
+1970-01-01 00:00:00
diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
index 87993d2b49..4c3c42b919 100644
--- a/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_interval_arithmetic.q.out
@@ -175,7 +175,7 @@ POSTHOOK: Input: default@interval_arithmetic_1
dateval _c1 _c2 _c3 _c4 _c5 _c6
0004-09-22 0002-07-22 0006-11-22 0006-11-22 0002-07-22 0002-07-22 0006-11-22
0528-10-27 0526-08-27 0530-12-27 0530-12-27 0526-08-27 0526-08-27 0530-12-27
-1319-02-02 1316-12-02 1321-04-02 1321-04-02 1316-12-02 1316-12-02 1321-04-02
+1319-02-02 1316-12-03 1321-04-02 1321-04-02 1316-12-03 1316-12-03 1321-04-02
1404-07-23 1402-05-23 1406-09-23 1406-09-23 1402-05-23 1402-05-23 1406-09-23
1815-05-06 1813-03-06 1817-07-06 1817-07-06 1813-03-06 1813-03-06 1817-07-06
1883-04-17 1881-02-17 1885-06-17 1885-06-17 1881-02-17 1881-02-17 1885-06-17
@@ -272,7 +272,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [0, 3, 4, 5]
- selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07 00:00:00.0) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07 00:00:00.0, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
+ selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
Statistics: Num rows: 50 Data size: 2744 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
@@ -349,56 +349,56 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@interval_arithmetic_1
#### A masked pattern was here ####
dateval _c1 _c2 _c3
-0004-09-22 -728552 23:00:00.000000000 728552 23:00:00.000000000 0 00:00:00.000000000
-0528-10-27 -537126 23:00:00.000000000 537126 23:00:00.000000000 0 00:00:00.000000000
-1319-02-02 -248481 23:00:00.000000000 248481 23:00:00.000000000 0 00:00:00.000000000
-1404-07-23 -217263 23:00:00.000000000 217263 23:00:00.000000000 0 00:00:00.000000000
-1815-05-06 -67236 23:00:00.000000000 67236 23:00:00.000000000 0 00:00:00.000000000
-1883-04-17 -42418 23:00:00.000000000 42418 23:00:00.000000000 0 00:00:00.000000000
+0004-09-22 -728551 00:00:00.000000000 728551 00:00:00.000000000 0 00:00:00.000000000
+0528-10-27 -537129 00:00:00.000000000 537129 00:00:00.000000000 0 00:00:00.000000000
+1319-02-02 -248490 00:00:00.000000000 248490 00:00:00.000000000 0 00:00:00.000000000
+1404-07-23 -217273 00:00:00.000000000 217273 00:00:00.000000000 0 00:00:00.000000000
+1815-05-06 -67237 00:00:00.000000000 67237 00:00:00.000000000 0 00:00:00.000000000
+1883-04-17 -42419 00:00:00.000000000 42419 00:00:00.000000000 0 00:00:00.000000000
1966-08-16 -11983 00:00:00.000000000 11983 00:00:00.000000000 0 00:00:00.000000000
-1973-04-17 -9546 23:00:00.000000000 9546 23:00:00.000000000 0 00:00:00.000000000
+1973-04-17 -9547 00:00:00.000000000 9547 00:00:00.000000000 0 00:00:00.000000000
1974-10-04 -9012 00:00:00.000000000 9012 00:00:00.000000000 0 00:00:00.000000000
-1976-03-03 -8495 23:00:00.000000000 8495 23:00:00.000000000 0 00:00:00.000000000
+1976-03-03 -8496 00:00:00.000000000 8496 00:00:00.000000000 0 00:00:00.000000000
1976-05-06 -8432 00:00:00.000000000 8432 00:00:00.000000000 0 00:00:00.000000000
1978-08-05 -7611 00:00:00.000000000 7611 00:00:00.000000000 0 00:00:00.000000000
-1981-04-25 -6616 23:00:00.000000000 6616 23:00:00.000000000 0 00:00:00.000000000
-1981-11-15 -6412 23:00:00.000000000 6412 23:00:00.000000000 0 00:00:00.000000000
+1981-04-25 -6617 00:00:00.000000000 6617 00:00:00.000000000 0 00:00:00.000000000
+1981-11-15 -6413 00:00:00.000000000 6413 00:00:00.000000000 0 00:00:00.000000000
1985-07-20 -5070 00:00:00.000000000 5070 00:00:00.000000000 0 00:00:00.000000000
-1985-11-18 -4948 23:00:00.000000000 4948 23:00:00.000000000 0 00:00:00.000000000
-1987-02-21 -4488 23:00:00.000000000 4488 23:00:00.000000000 0 00:00:00.000000000
+1985-11-18 -4949 00:00:00.000000000 4949 00:00:00.000000000 0 00:00:00.000000000
+1987-02-21 -4489 00:00:00.000000000 4489 00:00:00.000000000 0 00:00:00.000000000
1987-05-28 -4393 00:00:00.000000000 4393 00:00:00.000000000 0 00:00:00.000000000
1998-10-16 -234 00:00:00.000000000 234 00:00:00.000000000 0 00:00:00.000000000
1999-10-03 118 00:00:00.000000000 -118 00:00:00.000000000 0 00:00:00.000000000
-2000-12-18 560 01:00:00.000000000 -560 01:00:00.000000000 0 00:00:00.000000000
+2000-12-18 560 00:00:00.000000000 -560 00:00:00.000000000 0 00:00:00.000000000
2002-05-10 1068 00:00:00.000000000 -1068 00:00:00.000000000 0 00:00:00.000000000
2003-09-23 1569 00:00:00.000000000 -1569 00:00:00.000000000 0 00:00:00.000000000
-2004-03-07 1735 01:00:00.000000000 -1735 01:00:00.000000000 0 00:00:00.000000000
-2007-02-09 2804 01:00:00.000000000 -2804 01:00:00.000000000 0 00:00:00.000000000
-2009-01-21 3516 01:00:00.000000000 -3516 01:00:00.000000000 0 00:00:00.000000000
+2004-03-07 1735 00:00:00.000000000 -1735 00:00:00.000000000 0 00:00:00.000000000
+2007-02-09 2804 00:00:00.000000000 -2804 00:00:00.000000000 0 00:00:00.000000000
+2009-01-21 3516 00:00:00.000000000 -3516 00:00:00.000000000 0 00:00:00.000000000
2010-04-08 3958 00:00:00.000000000 -3958 00:00:00.000000000 0 00:00:00.000000000
2013-04-07 5053 00:00:00.000000000 -5053 00:00:00.000000000 0 00:00:00.000000000
2013-04-10 5056 00:00:00.000000000 -5056 00:00:00.000000000 0 00:00:00.000000000
2021-09-24 8145 00:00:00.000000000 -8145 00:00:00.000000000 0 00:00:00.000000000
-2024-11-11 9289 01:00:00.000000000 -9289 01:00:00.000000000 0 00:00:00.000000000
+2024-11-11 9289 00:00:00.000000000 -9289 00:00:00.000000000 0 00:00:00.000000000
4143-07-08 783111 00:00:00.000000000 -783111 00:00:00.000000000 0 00:00:00.000000000
-4966-12-04 1083855 01:00:00.000000000 -1083855 01:00:00.000000000 0 00:00:00.000000000
-5339-02-01 1219784 01:00:00.000000000 -1219784 01:00:00.000000000 0 00:00:00.000000000
+4966-12-04 1083855 00:00:00.000000000 -1083855 00:00:00.000000000 0 00:00:00.000000000
+5339-02-01 1219784 00:00:00.000000000 -1219784 00:00:00.000000000 0 00:00:00.000000000
5344-10-04 1221856 00:00:00.000000000 -1221856 00:00:00.000000000 0 00:00:00.000000000
5397-07-13 1241131 00:00:00.000000000 -1241131 00:00:00.000000000 0 00:00:00.000000000
5966-07-09 1448949 00:00:00.000000000 -1448949 00:00:00.000000000 0 00:00:00.000000000
6229-06-28 1544997 00:00:00.000000000 -1544997 00:00:00.000000000 0 00:00:00.000000000
6482-04-27 1637342 00:00:00.000000000 -1637342 00:00:00.000000000 0 00:00:00.000000000
-6631-11-13 1691962 01:00:00.000000000 -1691962 01:00:00.000000000 0 00:00:00.000000000
+6631-11-13 1691962 00:00:00.000000000 -1691962 00:00:00.000000000 0 00:00:00.000000000
6705-09-28 1718944 00:00:00.000000000 -1718944 00:00:00.000000000 0 00:00:00.000000000
-6731-02-12 1728212 01:00:00.000000000 -1728212 01:00:00.000000000 0 00:00:00.000000000
-7160-12-02 1885195 01:00:00.000000000 -1885195 01:00:00.000000000 0 00:00:00.000000000
+6731-02-12 1728212 00:00:00.000000000 -1728212 00:00:00.000000000 0 00:00:00.000000000
+7160-12-02 1885195 00:00:00.000000000 -1885195 00:00:00.000000000 0 00:00:00.000000000
7409-09-07 1976054 00:00:00.000000000 -1976054 00:00:00.000000000 0 00:00:00.000000000
7503-06-23 2010310 00:00:00.000000000 -2010310 00:00:00.000000000 0 00:00:00.000000000
8422-07-22 2345998 00:00:00.000000000 -2345998 00:00:00.000000000 0 00:00:00.000000000
-8521-01-16 2381970 01:00:00.000000000 -2381970 01:00:00.000000000 0 00:00:00.000000000
+8521-01-16 2381970 00:00:00.000000000 -2381970 00:00:00.000000000 0 00:00:00.000000000
9075-06-13 2584462 00:00:00.000000000 -2584462 00:00:00.000000000 0 00:00:00.000000000
-9209-11-11 2633556 01:00:00.000000000 -2633556 01:00:00.000000000 0 00:00:00.000000000
-9403-01-09 2704106 01:00:00.000000000 -2704106 01:00:00.000000000 0 00:00:00.000000000
+9209-11-11 2633556 00:00:00.000000000 -2633556 00:00:00.000000000 0 00:00:00.000000000
+9403-01-09 2704106 00:00:00.000000000 -2704106 00:00:00.000000000 0 00:00:00.000000000
PREHOOK: query: explain vectorization expression
select
tsval,
@@ -539,53 +539,53 @@ POSTHOOK: Input: default@interval_arithmetic_1
tsval _c1 _c2 _c3 _c4 _c5 _c6
0004-09-22 18:26:29.519542222 0002-07-22 18:26:29.519542222 0006-11-22 18:26:29.519542222 0006-11-22 18:26:29.519542222 0002-07-22 18:26:29.519542222 0002-07-22 18:26:29.519542222 0006-11-22 18:26:29.519542222
0528-10-27 08:15:18.941718273 0526-08-27 08:15:18.941718273 0530-12-27 08:15:18.941718273 0530-12-27 08:15:18.941718273 0526-08-27 08:15:18.941718273 0526-08-27 08:15:18.941718273 0530-12-27 08:15:18.941718273
-1319-02-02 16:31:57.778 1316-12-02 16:31:57.778 1321-04-02 16:31:57.778 1321-04-02 16:31:57.778 1316-12-02 16:31:57.778 1316-12-02 16:31:57.778 1321-04-02 16:31:57.778
+1319-02-02 16:31:57.778 1316-12-03 16:31:57.778 1321-04-02 16:31:57.778 1321-04-02 16:31:57.778 1316-12-03 16:31:57.778 1316-12-03 16:31:57.778 1321-04-02 16:31:57.778
1404-07-23 15:32:16.059185026 1402-05-23 15:32:16.059185026 1406-09-23 15:32:16.059185026 1406-09-23 15:32:16.059185026 1402-05-23 15:32:16.059185026 1402-05-23 15:32:16.059185026 1406-09-23 15:32:16.059185026
1815-05-06 00:12:37.543584705 1813-03-06 00:12:37.543584705 1817-07-06 00:12:37.543584705 1817-07-06 00:12:37.543584705 1813-03-06 00:12:37.543584705 1813-03-06 00:12:37.543584705 1817-07-06 00:12:37.543584705
1883-04-17 04:14:34.647766229 1881-02-17 04:14:34.647766229 1885-06-17 04:14:34.647766229 1885-06-17 04:14:34.647766229 1881-02-17 04:14:34.647766229 1881-02-17 04:14:34.647766229 1885-06-17 04:14:34.647766229
1966-08-16 13:36:50.183618031 1964-06-16 13:36:50.183618031 1968-10-16 13:36:50.183618031 1968-10-16 13:36:50.183618031 1964-06-16 13:36:50.183618031 1964-06-16 13:36:50.183618031 1968-10-16 13:36:50.183618031
-1973-04-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 07:30:38.596784156 1975-06-17 07:30:38.596784156 1971-02-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 07:30:38.596784156
-1974-10-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 16:21:03.989 1976-12-04 16:21:03.989 1972-08-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 16:21:03.989
-1976-03-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 05:54:33.000895162 1978-05-03 05:54:33.000895162 1974-01-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 05:54:33.000895162
+1973-04-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 06:30:38.596784156 1975-06-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 06:30:38.596784156
+1974-10-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 17:21:03.989 1976-12-04 17:21:03.989 1972-08-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 17:21:03.989
+1976-03-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 04:54:33.000895162 1978-05-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 04:54:33.000895162
1976-05-06 00:42:30.910786948 1974-03-06 00:42:30.910786948 1978-07-06 00:42:30.910786948 1978-07-06 00:42:30.910786948 1974-03-06 00:42:30.910786948 1974-03-06 00:42:30.910786948 1978-07-06 00:42:30.910786948
1978-08-05 14:41:05.501 1976-06-05 14:41:05.501 1980-10-05 14:41:05.501 1980-10-05 14:41:05.501 1976-06-05 14:41:05.501 1976-06-05 14:41:05.501 1980-10-05 14:41:05.501
-1981-04-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 10:01:12.077192689 1983-06-25 10:01:12.077192689 1979-02-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 10:01:12.077192689
-1981-11-15 23:03:10.999338387 1979-09-16 00:03:10.999338387 1984-01-15 23:03:10.999338387 1984-01-15 23:03:10.999338387 1979-09-16 00:03:10.999338387 1979-09-16 00:03:10.999338387 1984-01-15 23:03:10.999338387
+1981-04-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 09:01:12.077192689 1983-06-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 09:01:12.077192689
+1981-11-15 23:03:10.999338387 1979-09-15 23:03:10.999338387 1984-01-15 23:03:10.999338387 1984-01-15 23:03:10.999338387 1979-09-15 23:03:10.999338387 1979-09-15 23:03:10.999338387 1984-01-15 23:03:10.999338387
1985-07-20 09:30:11 1983-05-20 09:30:11 1987-09-20 09:30:11 1987-09-20 09:30:11 1983-05-20 09:30:11 1983-05-20 09:30:11 1987-09-20 09:30:11
-1985-11-18 16:37:54 1983-09-18 17:37:54 1988-01-18 16:37:54 1988-01-18 16:37:54 1983-09-18 17:37:54 1983-09-18 17:37:54 1988-01-18 16:37:54
-1987-02-21 19:48:29 1984-12-21 19:48:29 1989-04-21 20:48:29 1989-04-21 20:48:29 1984-12-21 19:48:29 1984-12-21 19:48:29 1989-04-21 20:48:29
-1987-05-28 13:52:07.900916635 1985-03-28 12:52:07.900916635 1989-07-28 13:52:07.900916635 1989-07-28 13:52:07.900916635 1985-03-28 12:52:07.900916635 1985-03-28 12:52:07.900916635 1989-07-28 13:52:07.900916635
-1998-10-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 19:05:29.397591987 2000-12-16 19:05:29.397591987 1996-08-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 19:05:29.397591987
-1999-10-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 15:59:10.396903939 2001-12-03 15:59:10.396903939 1997-08-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 15:59:10.396903939
-2000-12-18 08:42:30.000595596 1998-10-18 09:42:30.000595596 2003-02-18 08:42:30.000595596 2003-02-18 08:42:30.000595596 1998-10-18 09:42:30.000595596 1998-10-18 09:42:30.000595596 2003-02-18 08:42:30.000595596
-2002-05-10 05:29:48.990818073 2000-03-10 04:29:48.990818073 2004-07-10 05:29:48.990818073 2004-07-10 05:29:48.990818073 2000-03-10 04:29:48.990818073 2000-03-10 04:29:48.990818073 2004-07-10 05:29:48.990818073
-2003-09-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 21:33:17.00003252 2005-11-23 21:33:17.00003252 2001-07-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 21:33:17.00003252
-2004-03-07 20:14:13 2002-01-07 20:14:13 2006-05-07 21:14:13 2006-05-07 21:14:13 2002-01-07 20:14:13 2002-01-07 20:14:13 2006-05-07 21:14:13
-2007-02-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 06:17:29.368756876 2009-04-09 06:17:29.368756876 2004-12-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 06:17:29.368756876
-2009-01-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 11:49:07.108 2011-03-21 11:49:07.108 2006-11-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 11:49:07.108
-2010-04-08 02:43:35.861742727 2008-02-08 01:43:35.861742727 2012-06-08 02:43:35.861742727 2012-06-08 02:43:35.861742727 2008-02-08 01:43:35.861742727 2008-02-08 01:43:35.861742727 2012-06-08 02:43:35.861742727
-2013-04-07 02:44:43.00086821 2011-02-07 01:44:43.00086821 2015-06-07 02:44:43.00086821 2015-06-07 02:44:43.00086821 2011-02-07 01:44:43.00086821 2011-02-07 01:44:43.00086821 2015-06-07 02:44:43.00086821
-2013-04-10 00:43:46.854731546 2011-02-09 23:43:46.854731546 2015-06-10 00:43:46.854731546 2015-06-10 00:43:46.854731546 2011-02-09 23:43:46.854731546 2011-02-09 23:43:46.854731546 2015-06-10 00:43:46.854731546
-2021-09-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 02:18:32.413655165 2023-11-24 02:18:32.413655165 2019-07-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 02:18:32.413655165
-2024-11-11 16:42:41.101 2022-09-11 17:42:41.101 2027-01-11 16:42:41.101 2027-01-11 16:42:41.101 2022-09-11 17:42:41.101 2022-09-11 17:42:41.101 2027-01-11 16:42:41.101
+1985-11-18 16:37:54 1983-09-18 16:37:54 1988-01-18 16:37:54 1988-01-18 16:37:54 1983-09-18 16:37:54 1983-09-18 16:37:54 1988-01-18 16:37:54
+1987-02-21 19:48:29 1984-12-21 19:48:29 1989-04-21 19:48:29 1989-04-21 19:48:29 1984-12-21 19:48:29 1984-12-21 19:48:29 1989-04-21 19:48:29
+1987-05-28 13:52:07.900916635 1985-03-28 13:52:07.900916635 1989-07-28 13:52:07.900916635 1989-07-28 13:52:07.900916635 1985-03-28 13:52:07.900916635 1985-03-28 13:52:07.900916635 1989-07-28 13:52:07.900916635
+1998-10-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 20:05:29.397591987 2000-12-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 20:05:29.397591987
+1999-10-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 16:59:10.396903939 2001-12-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 16:59:10.396903939
+2000-12-18 08:42:30.000595596 1998-10-18 08:42:30.000595596 2003-02-18 08:42:30.000595596 2003-02-18 08:42:30.000595596 1998-10-18 08:42:30.000595596 1998-10-18 08:42:30.000595596 2003-02-18 08:42:30.000595596
+2002-05-10 05:29:48.990818073 2000-03-10 05:29:48.990818073 2004-07-10 05:29:48.990818073 2004-07-10 05:29:48.990818073 2000-03-10 05:29:48.990818073 2000-03-10 05:29:48.990818073 2004-07-10 05:29:48.990818073
+2003-09-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 22:33:17.00003252 2005-11-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 22:33:17.00003252
+2004-03-07 20:14:13 2002-01-07 20:14:13 2006-05-07 20:14:13 2006-05-07 20:14:13 2002-01-07 20:14:13 2002-01-07 20:14:13 2006-05-07 20:14:13
+2007-02-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 05:17:29.368756876 2009-04-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 05:17:29.368756876
+2009-01-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 10:49:07.108 2011-03-21 10:49:07.108 2006-11-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 10:49:07.108
+2010-04-08 02:43:35.861742727 2008-02-08 02:43:35.861742727 2012-06-08 02:43:35.861742727 2012-06-08 02:43:35.861742727 2008-02-08 02:43:35.861742727 2008-02-08 02:43:35.861742727 2012-06-08 02:43:35.861742727
+2013-04-07 02:44:43.00086821 2011-02-07 02:44:43.00086821 2015-06-07 02:44:43.00086821 2015-06-07 02:44:43.00086821 2011-02-07 02:44:43.00086821 2011-02-07 02:44:43.00086821 2015-06-07 02:44:43.00086821
+2013-04-10 00:43:46.854731546 2011-02-10 00:43:46.854731546 2015-06-10 00:43:46.854731546 2015-06-10 00:43:46.854731546 2011-02-10 00:43:46.854731546 2011-02-10 00:43:46.854731546 2015-06-10 00:43:46.854731546
+2021-09-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 03:18:32.413655165 2023-11-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 03:18:32.413655165
+2024-11-11 16:42:41.101 2022-09-11 16:42:41.101 2027-01-11 16:42:41.101 2027-01-11 16:42:41.101 2022-09-11 16:42:41.101 2022-09-11 16:42:41.101 2027-01-11 16:42:41.101
4143-07-08 10:53:27.252802259 4141-05-08 10:53:27.252802259 4145-09-08 10:53:27.252802259 4145-09-08 10:53:27.252802259 4141-05-08 10:53:27.252802259 4141-05-08 10:53:27.252802259 4145-09-08 10:53:27.252802259
-4966-12-04 09:30:55.202 4964-10-04 10:30:55.202 4969-02-04 09:30:55.202 4969-02-04 09:30:55.202 4964-10-04 10:30:55.202 4964-10-04 10:30:55.202 4969-02-04 09:30:55.202
-5339-02-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 15:10:01.085678691 5341-04-01 15:10:01.085678691 5336-12-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 15:10:01.085678691
-5344-10-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 17:40:08.165 5346-12-04 17:40:08.165 5342-08-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 17:40:08.165
+4966-12-04 09:30:55.202 4964-10-04 09:30:55.202 4969-02-04 09:30:55.202 4969-02-04 09:30:55.202 4964-10-04 09:30:55.202 4964-10-04 09:30:55.202 4969-02-04 09:30:55.202
+5339-02-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 14:10:01.085678691 5341-04-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 14:10:01.085678691
+5344-10-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 18:40:08.165 5346-12-04 18:40:08.165 5342-08-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 18:40:08.165
5397-07-13 07:12:32.000896438 5395-05-13 07:12:32.000896438 5399-09-13 07:12:32.000896438 5399-09-13 07:12:32.000896438 5395-05-13 07:12:32.000896438 5395-05-13 07:12:32.000896438 5399-09-13 07:12:32.000896438
5966-07-09 03:30:50.597 5964-05-09 03:30:50.597 5968-09-09 03:30:50.597 5968-09-09 03:30:50.597 5964-05-09 03:30:50.597 5964-05-09 03:30:50.597 5968-09-09 03:30:50.597
6229-06-28 02:54:28.970117179 6227-04-28 02:54:28.970117179 6231-08-28 02:54:28.970117179 6231-08-28 02:54:28.970117179 6227-04-28 02:54:28.970117179 6227-04-28 02:54:28.970117179 6231-08-28 02:54:28.970117179
-6482-04-27 12:07:38.073915413 6480-02-27 11:07:38.073915413 6484-06-27 12:07:38.073915413 6484-06-27 12:07:38.073915413 6480-02-27 11:07:38.073915413 6480-02-27 11:07:38.073915413 6484-06-27 12:07:38.073915413
-6631-11-13 16:31:29.702202248 6629-09-13 17:31:29.702202248 6634-01-13 16:31:29.702202248 6634-01-13 16:31:29.702202248 6629-09-13 17:31:29.702202248 6629-09-13 17:31:29.702202248 6634-01-13 16:31:29.702202248
-6705-09-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 17:27:28.000845672 6707-11-28 17:27:28.000845672 6703-07-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 17:27:28.000845672
-6731-02-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 09:12:48.287783702 6733-04-12 09:12:48.287783702 6728-12-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 09:12:48.287783702
-7160-12-02 06:00:24.81200852 7158-10-02 07:00:24.81200852 7163-02-02 06:00:24.81200852 7163-02-02 06:00:24.81200852 7158-10-02 07:00:24.81200852 7158-10-02 07:00:24.81200852 7163-02-02 06:00:24.81200852
-7409-09-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 22:33:32.459349602 7411-11-07 22:33:32.459349602 7407-07-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 22:33:32.459349602
+6482-04-27 12:07:38.073915413 6480-02-27 12:07:38.073915413 6484-06-27 12:07:38.073915413 6484-06-27 12:07:38.073915413 6480-02-27 12:07:38.073915413 6480-02-27 12:07:38.073915413 6484-06-27 12:07:38.073915413
+6631-11-13 16:31:29.702202248 6629-09-13 16:31:29.702202248 6634-01-13 16:31:29.702202248 6634-01-13 16:31:29.702202248 6629-09-13 16:31:29.702202248 6629-09-13 16:31:29.702202248 6634-01-13 16:31:29.702202248
+6705-09-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 18:27:28.000845672 6707-11-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 18:27:28.000845672
+6731-02-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 08:12:48.287783702 6733-04-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 08:12:48.287783702
+7160-12-02 06:00:24.81200852 7158-10-02 06:00:24.81200852 7163-02-02 06:00:24.81200852 7163-02-02 06:00:24.81200852 7158-10-02 06:00:24.81200852 7158-10-02 06:00:24.81200852 7163-02-02 06:00:24.81200852
+7409-09-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 23:33:32.459349602 7411-11-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 23:33:32.459349602
7503-06-23 23:14:17.486 7501-04-23 23:14:17.486 7505-08-23 23:14:17.486 7505-08-23 23:14:17.486 7501-04-23 23:14:17.486 7501-04-23 23:14:17.486 7505-08-23 23:14:17.486
8422-07-22 03:21:45.745036084 8420-05-22 03:21:45.745036084 8424-09-22 03:21:45.745036084 8424-09-22 03:21:45.745036084 8420-05-22 03:21:45.745036084 8420-05-22 03:21:45.745036084 8424-09-22 03:21:45.745036084
-8521-01-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 21:42:05.668832388 8523-03-16 21:42:05.668832388 8518-11-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 21:42:05.668832388
+8521-01-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 20:42:05.668832388 8523-03-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 20:42:05.668832388
9075-06-13 16:20:09.218517797 9073-04-13 16:20:09.218517797 9077-08-13 16:20:09.218517797 9077-08-13 16:20:09.218517797 9073-04-13 16:20:09.218517797 9073-04-13 16:20:09.218517797 9077-08-13 16:20:09.218517797
-9209-11-11 04:08:58.223768453 9207-09-11 05:08:58.223768453 9212-01-11 04:08:58.223768453 9212-01-11 04:08:58.223768453 9207-09-11 05:08:58.223768453 9207-09-11 05:08:58.223768453 9212-01-11 04:08:58.223768453
+9209-11-11 04:08:58.223768453 9207-09-11 04:08:58.223768453 9212-01-11 04:08:58.223768453 9212-01-11 04:08:58.223768453 9207-09-11 04:08:58.223768453 9207-09-11 04:08:58.223768453 9212-01-11 04:08:58.223768453
9403-01-09 18:12:33.547 9400-11-09 18:12:33.547 9405-03-09 18:12:33.547 9405-03-09 18:12:33.547 9400-11-09 18:12:33.547 9400-11-09 18:12:33.547 9405-03-09 18:12:33.547
PREHOOK: query: explain vectorization expression
select
@@ -832,50 +832,50 @@ dateval _c1 _c2 _c3 _c4 _c5 _c6
1404-07-23 1404-04-14 12:37:26.876543211 1404-10-30 11:22:33.123456789 1404-10-30 11:22:33.123456789 1404-04-14 12:37:26.876543211 1404-04-14 12:37:26.876543211 1404-10-30 11:22:33.123456789
1815-05-06 1815-01-26 12:37:26.876543211 1815-08-13 11:22:33.123456789 1815-08-13 11:22:33.123456789 1815-01-26 12:37:26.876543211 1815-01-26 12:37:26.876543211 1815-08-13 11:22:33.123456789
1883-04-17 1883-01-07 12:37:26.876543211 1883-07-25 11:22:33.123456789 1883-07-25 11:22:33.123456789 1883-01-07 12:37:26.876543211 1883-01-07 12:37:26.876543211 1883-07-25 11:22:33.123456789
-1966-08-16 1966-05-08 12:37:26.876543211 1966-11-23 10:22:33.123456789 1966-11-23 10:22:33.123456789 1966-05-08 12:37:26.876543211 1966-05-08 12:37:26.876543211 1966-11-23 10:22:33.123456789
-1973-04-17 1973-01-07 12:37:26.876543211 1973-07-25 12:22:33.123456789 1973-07-25 12:22:33.123456789 1973-01-07 12:37:26.876543211 1973-01-07 12:37:26.876543211 1973-07-25 12:22:33.123456789
-1974-10-04 1974-06-26 12:37:26.876543211 1975-01-11 10:22:33.123456789 1975-01-11 10:22:33.123456789 1974-06-26 12:37:26.876543211 1974-06-26 12:37:26.876543211 1975-01-11 10:22:33.123456789
-1976-03-03 1975-11-24 12:37:26.876543211 1976-06-10 12:22:33.123456789 1976-06-10 12:22:33.123456789 1975-11-24 12:37:26.876543211 1975-11-24 12:37:26.876543211 1976-06-10 12:22:33.123456789
-1976-05-06 1976-01-27 11:37:26.876543211 1976-08-13 11:22:33.123456789 1976-08-13 11:22:33.123456789 1976-01-27 11:37:26.876543211 1976-01-27 11:37:26.876543211 1976-08-13 11:22:33.123456789
-1978-08-05 1978-04-27 11:37:26.876543211 1978-11-12 10:22:33.123456789 1978-11-12 10:22:33.123456789 1978-04-27 11:37:26.876543211 1978-04-27 11:37:26.876543211 1978-11-12 10:22:33.123456789
-1981-04-25 1981-01-15 12:37:26.876543211 1981-08-02 12:22:33.123456789 1981-08-02 12:22:33.123456789 1981-01-15 12:37:26.876543211 1981-01-15 12:37:26.876543211 1981-08-02 12:22:33.123456789
-1981-11-15 1981-08-07 13:37:26.876543211 1982-02-22 11:22:33.123456789 1982-02-22 11:22:33.123456789 1981-08-07 13:37:26.876543211 1981-08-07 13:37:26.876543211 1982-02-22 11:22:33.123456789
-1985-07-20 1985-04-11 11:37:26.876543211 1985-10-27 10:22:33.123456789 1985-10-27 10:22:33.123456789 1985-04-11 11:37:26.876543211 1985-04-11 11:37:26.876543211 1985-10-27 10:22:33.123456789
-1985-11-18 1985-08-10 13:37:26.876543211 1986-02-25 11:22:33.123456789 1986-02-25 11:22:33.123456789 1985-08-10 13:37:26.876543211 1985-08-10 13:37:26.876543211 1986-02-25 11:22:33.123456789
-1987-02-21 1986-11-13 12:37:26.876543211 1987-05-31 12:22:33.123456789 1987-05-31 12:22:33.123456789 1986-11-13 12:37:26.876543211 1986-11-13 12:37:26.876543211 1987-05-31 12:22:33.123456789
-1987-05-28 1987-02-17 11:37:26.876543211 1987-09-04 11:22:33.123456789 1987-09-04 11:22:33.123456789 1987-02-17 11:37:26.876543211 1987-02-17 11:37:26.876543211 1987-09-04 11:22:33.123456789
-1998-10-16 1998-07-08 12:37:26.876543211 1999-01-23 10:22:33.123456789 1999-01-23 10:22:33.123456789 1998-07-08 12:37:26.876543211 1998-07-08 12:37:26.876543211 1999-01-23 10:22:33.123456789
-1999-10-03 1999-06-25 12:37:26.876543211 2000-01-10 10:22:33.123456789 2000-01-10 10:22:33.123456789 1999-06-25 12:37:26.876543211 1999-06-25 12:37:26.876543211 2000-01-10 10:22:33.123456789
-2000-12-18 2000-09-09 13:37:26.876543211 2001-03-27 11:22:33.123456789 2001-03-27 11:22:33.123456789 2000-09-09 13:37:26.876543211 2000-09-09 13:37:26.876543211 2001-03-27 11:22:33.123456789
-2002-05-10 2002-01-30 11:37:26.876543211 2002-08-17 11:22:33.123456789 2002-08-17 11:22:33.123456789 2002-01-30 11:37:26.876543211 2002-01-30 11:37:26.876543211 2002-08-17 11:22:33.123456789
-2003-09-23 2003-06-15 12:37:26.876543211 2003-12-31 10:22:33.123456789 2003-12-31 10:22:33.123456789 2003-06-15 12:37:26.876543211 2003-06-15 12:37:26.876543211 2003-12-31 10:22:33.123456789
-2004-03-07 2003-11-28 12:37:26.876543211 2004-06-14 12:22:33.123456789 2004-06-14 12:22:33.123456789 2003-11-28 12:37:26.876543211 2003-11-28 12:37:26.876543211 2004-06-14 12:22:33.123456789
-2007-02-09 2006-11-01 12:37:26.876543211 2007-05-19 12:22:33.123456789 2007-05-19 12:22:33.123456789 2006-11-01 12:37:26.876543211 2006-11-01 12:37:26.876543211 2007-05-19 12:22:33.123456789
-2009-01-21 2008-10-13 13:37:26.876543211 2009-04-30 12:22:33.123456789 2009-04-30 12:22:33.123456789 2008-10-13 13:37:26.876543211 2008-10-13 13:37:26.876543211 2009-04-30 12:22:33.123456789
-2010-04-08 2009-12-29 11:37:26.876543211 2010-07-16 11:22:33.123456789 2010-07-16 11:22:33.123456789 2009-12-29 11:37:26.876543211 2009-12-29 11:37:26.876543211 2010-07-16 11:22:33.123456789
-2013-04-07 2012-12-28 11:37:26.876543211 2013-07-15 11:22:33.123456789 2013-07-15 11:22:33.123456789 2012-12-28 11:37:26.876543211 2012-12-28 11:37:26.876543211 2013-07-15 11:22:33.123456789
-2013-04-10 2012-12-31 11:37:26.876543211 2013-07-18 11:22:33.123456789 2013-07-18 11:22:33.123456789 2012-12-31 11:37:26.876543211 2012-12-31 11:37:26.876543211 2013-07-18 11:22:33.123456789
-2021-09-24 2021-06-16 12:37:26.876543211 2022-01-01 10:22:33.123456789 2022-01-01 10:22:33.123456789 2021-06-16 12:37:26.876543211 2021-06-16 12:37:26.876543211 2022-01-01 10:22:33.123456789
-2024-11-11 2024-08-03 13:37:26.876543211 2025-02-18 11:22:33.123456789 2025-02-18 11:22:33.123456789 2024-08-03 13:37:26.876543211 2024-08-03 13:37:26.876543211 2025-02-18 11:22:33.123456789
+1966-08-16 1966-05-08 12:37:26.876543211 1966-11-23 11:22:33.123456789 1966-11-23 11:22:33.123456789 1966-05-08 12:37:26.876543211 1966-05-08 12:37:26.876543211 1966-11-23 11:22:33.123456789
+1973-04-17 1973-01-07 12:37:26.876543211 1973-07-25 11:22:33.123456789 1973-07-25 11:22:33.123456789 1973-01-07 12:37:26.876543211 1973-01-07 12:37:26.876543211 1973-07-25 11:22:33.123456789
+1974-10-04 1974-06-26 12:37:26.876543211 1975-01-11 11:22:33.123456789 1975-01-11 11:22:33.123456789 1974-06-26 12:37:26.876543211 1974-06-26 12:37:26.876543211 1975-01-11 11:22:33.123456789
+1976-03-03 1975-11-24 12:37:26.876543211 1976-06-10 11:22:33.123456789 1976-06-10 11:22:33.123456789 1975-11-24 12:37:26.876543211 1975-11-24 12:37:26.876543211 1976-06-10 11:22:33.123456789
+1976-05-06 1976-01-27 12:37:26.876543211 1976-08-13 11:22:33.123456789 1976-08-13 11:22:33.123456789 1976-01-27 12:37:26.876543211 1976-01-27 12:37:26.876543211 1976-08-13 11:22:33.123456789
+1978-08-05 1978-04-27 12:37:26.876543211 1978-11-12 11:22:33.123456789 1978-11-12 11:22:33.123456789 1978-04-27 12:37:26.876543211 1978-04-27 12:37:26.876543211 1978-11-12 11:22:33.123456789
+1981-04-25 1981-01-15 12:37:26.876543211 1981-08-02 11:22:33.123456789 1981-08-02 11:22:33.123456789 1981-01-15 12:37:26.876543211 1981-01-15 12:37:26.876543211 1981-08-02 11:22:33.123456789
+1981-11-15 1981-08-07 12:37:26.876543211 1982-02-22 11:22:33.123456789 1982-02-22 11:22:33.123456789 1981-08-07 12:37:26.876543211 1981-08-07 12:37:26.876543211 1982-02-22 11:22:33.123456789
+1985-07-20 1985-04-11 12:37:26.876543211 1985-10-27 11:22:33.123456789 1985-10-27 11:22:33.123456789 1985-04-11 12:37:26.876543211 1985-04-11 12:37:26.876543211 1985-10-27 11:22:33.123456789
+1985-11-18 1985-08-10 12:37:26.876543211 1986-02-25 11:22:33.123456789 1986-02-25 11:22:33.123456789 1985-08-10 12:37:26.876543211 1985-08-10 12:37:26.876543211 1986-02-25 11:22:33.123456789
+1987-02-21 1986-11-13 12:37:26.876543211 1987-05-31 11:22:33.123456789 1987-05-31 11:22:33.123456789 1986-11-13 12:37:26.876543211 1986-11-13 12:37:26.876543211 1987-05-31 11:22:33.123456789
+1987-05-28 1987-02-17 12:37:26.876543211 1987-09-04 11:22:33.123456789 1987-09-04 11:22:33.123456789 1987-02-17 12:37:26.876543211 1987-02-17 12:37:26.876543211 1987-09-04 11:22:33.123456789
+1998-10-16 1998-07-08 12:37:26.876543211 1999-01-23 11:22:33.123456789 1999-01-23 11:22:33.123456789 1998-07-08 12:37:26.876543211 1998-07-08 12:37:26.876543211 1999-01-23 11:22:33.123456789
+1999-10-03 1999-06-25 12:37:26.876543211 2000-01-10 11:22:33.123456789 2000-01-10 11:22:33.123456789 1999-06-25 12:37:26.876543211 1999-06-25 12:37:26.876543211 2000-01-10 11:22:33.123456789
+2000-12-18 2000-09-09 12:37:26.876543211 2001-03-27 11:22:33.123456789 2001-03-27 11:22:33.123456789 2000-09-09 12:37:26.876543211 2000-09-09 12:37:26.876543211 2001-03-27 11:22:33.123456789
+2002-05-10 2002-01-30 12:37:26.876543211 2002-08-17 11:22:33.123456789 2002-08-17 11:22:33.123456789 2002-01-30 12:37:26.876543211 2002-01-30 12:37:26.876543211 2002-08-17 11:22:33.123456789
+2003-09-23 2003-06-15 12:37:26.876543211 2003-12-31 11:22:33.123456789 2003-12-31 11:22:33.123456789 2003-06-15 12:37:26.876543211 2003-06-15 12:37:26.876543211 2003-12-31 11:22:33.123456789
+2004-03-07 2003-11-28 12:37:26.876543211 2004-06-14 11:22:33.123456789 2004-06-14 11:22:33.123456789 2003-11-28 12:37:26.876543211 2003-11-28 12:37:26.876543211 2004-06-14 11:22:33.123456789
+2007-02-09 2006-11-01 12:37:26.876543211 2007-05-19 11:22:33.123456789 2007-05-19 11:22:33.123456789 2006-11-01 12:37:26.876543211 2006-11-01 12:37:26.876543211 2007-05-19 11:22:33.123456789
+2009-01-21 2008-10-13 12:37:26.876543211 2009-04-30 11:22:33.123456789 2009-04-30 11:22:33.123456789 2008-10-13 12:37:26.876543211 2008-10-13 12:37:26.876543211 2009-04-30 11:22:33.123456789
+2010-04-08 2009-12-29 12:37:26.876543211 2010-07-16 11:22:33.123456789 2010-07-16 11:22:33.123456789 2009-12-29 12:37:26.876543211 2009-12-29 12:37:26.876543211 2010-07-16 11:22:33.123456789
+2013-04-07 2012-12-28 12:37:26.876543211 2013-07-15 11:22:33.123456789 2013-07-15 11:22:33.123456789 2012-12-28 12:37:26.876543211 2012-12-28 12:37:26.876543211 2013-07-15 11:22:33.123456789
+2013-04-10 2012-12-31 12:37:26.876543211 2013-07-18 11:22:33.123456789 2013-07-18 11:22:33.123456789 2012-12-31 12:37:26.876543211 2012-12-31 12:37:26.876543211 2013-07-18 11:22:33.123456789
+2021-09-24 2021-06-16 12:37:26.876543211 2022-01-01 11:22:33.123456789 2022-01-01 11:22:33.123456789 2021-06-16 12:37:26.876543211 2021-06-16 12:37:26.876543211 2022-01-01 11:22:33.123456789
+2024-11-11 2024-08-03 12:37:26.876543211 2025-02-18 11:22:33.123456789 2025-02-18 11:22:33.123456789 2024-08-03 12:37:26.876543211 2024-08-03 12:37:26.876543211 2025-02-18 11:22:33.123456789
4143-07-08 4143-03-30 12:37:26.876543211 4143-10-15 11:22:33.123456789 4143-10-15 11:22:33.123456789 4143-03-30 12:37:26.876543211 4143-03-30 12:37:26.876543211 4143-10-15 11:22:33.123456789
-4966-12-04 4966-08-26 13:37:26.876543211 4967-03-13 12:22:33.123456789 4967-03-13 12:22:33.123456789 4966-08-26 13:37:26.876543211 4966-08-26 13:37:26.876543211 4967-03-13 12:22:33.123456789
-5339-02-01 5338-10-24 13:37:26.876543211 5339-05-11 12:22:33.123456789 5339-05-11 12:22:33.123456789 5338-10-24 13:37:26.876543211 5338-10-24 13:37:26.876543211 5339-05-11 12:22:33.123456789
-5344-10-04 5344-06-26 12:37:26.876543211 5345-01-11 10:22:33.123456789 5345-01-11 10:22:33.123456789 5344-06-26 12:37:26.876543211 5344-06-26 12:37:26.876543211 5345-01-11 10:22:33.123456789
+4966-12-04 4966-08-26 12:37:26.876543211 4967-03-13 11:22:33.123456789 4967-03-13 11:22:33.123456789 4966-08-26 12:37:26.876543211 4966-08-26 12:37:26.876543211 4967-03-13 11:22:33.123456789
+5339-02-01 5338-10-24 12:37:26.876543211 5339-05-11 11:22:33.123456789 5339-05-11 11:22:33.123456789 5338-10-24 12:37:26.876543211 5338-10-24 12:37:26.876543211 5339-05-11 11:22:33.123456789
+5344-10-04 5344-06-26 12:37:26.876543211 5345-01-11 11:22:33.123456789 5345-01-11 11:22:33.123456789 5344-06-26 12:37:26.876543211 5344-06-26 12:37:26.876543211 5345-01-11 11:22:33.123456789
5397-07-13 5397-04-04 12:37:26.876543211 5397-10-20 11:22:33.123456789 5397-10-20 11:22:33.123456789 5397-04-04 12:37:26.876543211 5397-04-04 12:37:26.876543211 5397-10-20 11:22:33.123456789
5966-07-09 5966-03-31 12:37:26.876543211 5966-10-16 11:22:33.123456789 5966-10-16 11:22:33.123456789 5966-03-31 12:37:26.876543211 5966-03-31 12:37:26.876543211 5966-10-16 11:22:33.123456789
6229-06-28 6229-03-20 12:37:26.876543211 6229-10-05 11:22:33.123456789 6229-10-05 11:22:33.123456789 6229-03-20 12:37:26.876543211 6229-03-20 12:37:26.876543211 6229-10-05 11:22:33.123456789
-6482-04-27 6482-01-17 11:37:26.876543211 6482-08-04 11:22:33.123456789 6482-08-04 11:22:33.123456789 6482-01-17 11:37:26.876543211 6482-01-17 11:37:26.876543211 6482-08-04 11:22:33.123456789
-6631-11-13 6631-08-05 13:37:26.876543211 6632-02-20 11:22:33.123456789 6632-02-20 11:22:33.123456789 6631-08-05 13:37:26.876543211 6631-08-05 13:37:26.876543211 6632-02-20 11:22:33.123456789
-6705-09-28 6705-06-20 12:37:26.876543211 6706-01-05 10:22:33.123456789 6706-01-05 10:22:33.123456789 6705-06-20 12:37:26.876543211 6705-06-20 12:37:26.876543211 6706-01-05 10:22:33.123456789
-6731-02-12 6730-11-04 12:37:26.876543211 6731-05-22 12:22:33.123456789 6731-05-22 12:22:33.123456789 6730-11-04 12:37:26.876543211 6730-11-04 12:37:26.876543211 6731-05-22 12:22:33.123456789
-7160-12-02 7160-08-24 13:37:26.876543211 7161-03-11 11:22:33.123456789 7161-03-11 11:22:33.123456789 7160-08-24 13:37:26.876543211 7160-08-24 13:37:26.876543211 7161-03-11 11:22:33.123456789
-7409-09-07 7409-05-30 12:37:26.876543211 7409-12-15 10:22:33.123456789 7409-12-15 10:22:33.123456789 7409-05-30 12:37:26.876543211 7409-05-30 12:37:26.876543211 7409-12-15 10:22:33.123456789
+6482-04-27 6482-01-17 12:37:26.876543211 6482-08-04 11:22:33.123456789 6482-08-04 11:22:33.123456789 6482-01-17 12:37:26.876543211 6482-01-17 12:37:26.876543211 6482-08-04 11:22:33.123456789
+6631-11-13 6631-08-05 12:37:26.876543211 6632-02-20 11:22:33.123456789 6632-02-20 11:22:33.123456789 6631-08-05 12:37:26.876543211 6631-08-05 12:37:26.876543211 6632-02-20 11:22:33.123456789
+6705-09-28 6705-06-20 12:37:26.876543211 6706-01-05 11:22:33.123456789 6706-01-05 11:22:33.123456789 6705-06-20 12:37:26.876543211 6705-06-20 12:37:26.876543211 6706-01-05 11:22:33.123456789
+6731-02-12 6730-11-04 12:37:26.876543211 6731-05-22 11:22:33.123456789 6731-05-22 11:22:33.123456789 6730-11-04 12:37:26.876543211 6730-11-04 12:37:26.876543211 6731-05-22 11:22:33.123456789
+7160-12-02 7160-08-24 12:37:26.876543211 7161-03-11 11:22:33.123456789 7161-03-11 11:22:33.123456789 7160-08-24 12:37:26.876543211 7160-08-24 12:37:26.876543211 7161-03-11 11:22:33.123456789
+7409-09-07 7409-05-30 12:37:26.876543211 7409-12-15 11:22:33.123456789 7409-12-15 11:22:33.123456789 7409-05-30 12:37:26.876543211 7409-05-30 12:37:26.876543211 7409-12-15 11:22:33.123456789
7503-06-23 7503-03-15 12:37:26.876543211 7503-09-30 11:22:33.123456789 7503-09-30 11:22:33.123456789 7503-03-15 12:37:26.876543211 7503-03-15 12:37:26.876543211 7503-09-30 11:22:33.123456789
8422-07-22 8422-04-13 12:37:26.876543211 8422-10-29 11:22:33.123456789 8422-10-29 11:22:33.123456789 8422-04-13 12:37:26.876543211 8422-04-13 12:37:26.876543211 8422-10-29 11:22:33.123456789
-8521-01-16 8520-10-08 13:37:26.876543211 8521-04-25 12:22:33.123456789 8521-04-25 12:22:33.123456789 8520-10-08 13:37:26.876543211 8520-10-08 13:37:26.876543211 8521-04-25 12:22:33.123456789
-9075-06-13 9075-03-05 11:37:26.876543211 9075-09-20 11:22:33.123456789 9075-09-20 11:22:33.123456789 9075-03-05 11:37:26.876543211 9075-03-05 11:37:26.876543211 9075-09-20 11:22:33.123456789
-9209-11-11 9209-08-03 13:37:26.876543211 9210-02-18 11:22:33.123456789 9210-02-18 11:22:33.123456789 9209-08-03 13:37:26.876543211 9209-08-03 13:37:26.876543211 9210-02-18 11:22:33.123456789
-9403-01-09 9402-10-01 13:37:26.876543211 9403-04-18 12:22:33.123456789 9403-04-18 12:22:33.123456789 9402-10-01 13:37:26.876543211 9402-10-01 13:37:26.876543211 9403-04-18 12:22:33.123456789
+8521-01-16 8520-10-08 12:37:26.876543211 8521-04-25 11:22:33.123456789 8521-04-25 11:22:33.123456789 8520-10-08 12:37:26.876543211 8520-10-08 12:37:26.876543211 8521-04-25 11:22:33.123456789
+9075-06-13 9075-03-05 12:37:26.876543211 9075-09-20 11:22:33.123456789 9075-09-20 11:22:33.123456789 9075-03-05 12:37:26.876543211 9075-03-05 12:37:26.876543211 9075-09-20 11:22:33.123456789
+9209-11-11 9209-08-03 12:37:26.876543211 9210-02-18 11:22:33.123456789 9210-02-18 11:22:33.123456789 9209-08-03 12:37:26.876543211 9209-08-03 12:37:26.876543211 9210-02-18 11:22:33.123456789
+9403-01-09 9402-10-01 12:37:26.876543211 9403-04-18 11:22:33.123456789 9403-04-18 11:22:33.123456789 9402-10-01 12:37:26.876543211 9402-10-01 12:37:26.876543211 9403-04-18 11:22:33.123456789
PREHOOK: query: explain vectorization expression
select
dateval,
@@ -1200,50 +1200,50 @@ tsval _c1 _c2 _c3 _c4 _c5 _c6
1404-07-23 15:32:16.059185026 1404-04-15 04:09:42.935728237 1404-10-31 02:54:49.182641815 1404-10-31 02:54:49.182641815 1404-04-15 04:09:42.935728237 1404-04-15 04:09:42.935728237 1404-10-31 02:54:49.182641815
1815-05-06 00:12:37.543584705 1815-01-26 12:50:04.420127916 1815-08-13 11:35:10.667041494 1815-08-13 11:35:10.667041494 1815-01-26 12:50:04.420127916 1815-01-26 12:50:04.420127916 1815-08-13 11:35:10.667041494
1883-04-17 04:14:34.647766229 1883-01-07 16:52:01.52430944 1883-07-25 15:37:07.771223018 1883-07-25 15:37:07.771223018 1883-01-07 16:52:01.52430944 1883-01-07 16:52:01.52430944 1883-07-25 15:37:07.771223018
-1966-08-16 13:36:50.183618031 1966-05-09 02:14:17.060161242 1966-11-23 23:59:23.30707482 1966-11-23 23:59:23.30707482 1966-05-09 02:14:17.060161242 1966-05-09 02:14:17.060161242 1966-11-23 23:59:23.30707482
-1973-04-17 06:30:38.596784156 1973-01-07 19:08:05.473327367 1973-07-25 18:53:11.720240945 1973-07-25 18:53:11.720240945 1973-01-07 19:08:05.473327367 1973-01-07 19:08:05.473327367 1973-07-25 18:53:11.720240945
-1974-10-04 17:21:03.989 1974-06-27 05:58:30.865543211 1975-01-12 03:43:37.112456789 1975-01-12 03:43:37.112456789 1974-06-27 05:58:30.865543211 1974-06-27 05:58:30.865543211 1975-01-12 03:43:37.112456789
-1976-03-03 04:54:33.000895162 1975-11-24 17:31:59.877438373 1976-06-10 17:17:06.124351951 1976-06-10 17:17:06.124351951 1975-11-24 17:31:59.877438373 1975-11-24 17:31:59.877438373 1976-06-10 17:17:06.124351951
-1976-05-06 00:42:30.910786948 1976-01-27 12:19:57.787330159 1976-08-13 12:05:04.034243737 1976-08-13 12:05:04.034243737 1976-01-27 12:19:57.787330159 1976-01-27 12:19:57.787330159 1976-08-13 12:05:04.034243737
-1978-08-05 14:41:05.501 1978-04-28 02:18:32.377543211 1978-11-13 01:03:38.624456789 1978-11-13 01:03:38.624456789 1978-04-28 02:18:32.377543211 1978-04-28 02:18:32.377543211 1978-11-13 01:03:38.624456789
-1981-04-25 09:01:12.077192689 1981-01-15 21:38:38.9537359 1981-08-02 21:23:45.200649478 1981-08-02 21:23:45.200649478 1981-01-15 21:38:38.9537359 1981-01-15 21:38:38.9537359 1981-08-02 21:23:45.200649478
-1981-11-15 23:03:10.999338387 1981-08-08 12:40:37.875881598 1982-02-23 10:25:44.122795176 1982-02-23 10:25:44.122795176 1981-08-08 12:40:37.875881598 1981-08-08 12:40:37.875881598 1982-02-23 10:25:44.122795176
-1985-07-20 09:30:11 1985-04-11 21:07:37.876543211 1985-10-27 19:52:44.123456789 1985-10-27 19:52:44.123456789 1985-04-11 21:07:37.876543211 1985-04-11 21:07:37.876543211 1985-10-27 19:52:44.123456789
-1985-11-18 16:37:54 1985-08-11 06:15:20.876543211 1986-02-26 04:00:27.123456789 1986-02-26 04:00:27.123456789 1985-08-11 06:15:20.876543211 1985-08-11 06:15:20.876543211 1986-02-26 04:00:27.123456789
-1987-02-21 19:48:29 1986-11-14 08:25:55.876543211 1987-06-01 08:11:02.123456789 1987-06-01 08:11:02.123456789 1986-11-14 08:25:55.876543211 1986-11-14 08:25:55.876543211 1987-06-01 08:11:02.123456789
-1987-05-28 13:52:07.900916635 1987-02-18 01:29:34.777459846 1987-09-05 01:14:41.024373424 1987-09-05 01:14:41.024373424 1987-02-18 01:29:34.777459846 1987-02-18 01:29:34.777459846 1987-09-05 01:14:41.024373424
-1998-10-16 20:05:29.397591987 1998-07-09 08:42:56.274135198 1999-01-24 06:28:02.521048776 1999-01-24 06:28:02.521048776 1998-07-09 08:42:56.274135198 1998-07-09 08:42:56.274135198 1999-01-24 06:28:02.521048776
-1999-10-03 16:59:10.396903939 1999-06-26 05:36:37.27344715 2000-01-11 03:21:43.520360728 2000-01-11 03:21:43.520360728 1999-06-26 05:36:37.27344715 1999-06-26 05:36:37.27344715 2000-01-11 03:21:43.520360728
-2000-12-18 08:42:30.000595596 2000-09-09 22:19:56.877138807 2001-03-27 20:05:03.124052385 2001-03-27 20:05:03.124052385 2000-09-09 22:19:56.877138807 2000-09-09 22:19:56.877138807 2001-03-27 20:05:03.124052385
-2002-05-10 05:29:48.990818073 2002-01-30 17:07:15.867361284 2002-08-17 16:52:22.114274862 2002-08-17 16:52:22.114274862 2002-01-30 17:07:15.867361284 2002-01-30 17:07:15.867361284 2002-08-17 16:52:22.114274862
-2003-09-23 22:33:17.00003252 2003-06-16 11:10:43.876575731 2004-01-01 08:55:50.123489309 2004-01-01 08:55:50.123489309 2003-06-16 11:10:43.876575731 2003-06-16 11:10:43.876575731 2004-01-01 08:55:50.123489309
-2004-03-07 20:14:13 2003-11-29 08:51:39.876543211 2004-06-15 08:36:46.123456789 2004-06-15 08:36:46.123456789 2003-11-29 08:51:39.876543211 2003-11-29 08:51:39.876543211 2004-06-15 08:36:46.123456789
-2007-02-09 05:17:29.368756876 2006-11-01 17:54:56.245300087 2007-05-19 17:40:02.492213665 2007-05-19 17:40:02.492213665 2006-11-01 17:54:56.245300087 2006-11-01 17:54:56.245300087 2007-05-19 17:40:02.492213665
-2009-01-21 10:49:07.108 2008-10-14 00:26:33.984543211 2009-04-30 23:11:40.231456789 2009-04-30 23:11:40.231456789 2008-10-14 00:26:33.984543211 2008-10-14 00:26:33.984543211 2009-04-30 23:11:40.231456789
-2010-04-08 02:43:35.861742727 2009-12-29 14:21:02.738285938 2010-07-16 14:06:08.985199516 2010-07-16 14:06:08.985199516 2009-12-29 14:21:02.738285938 2009-12-29 14:21:02.738285938 2010-07-16 14:06:08.985199516
-2013-04-07 02:44:43.00086821 2012-12-28 14:22:09.877411421 2013-07-15 14:07:16.124324999 2013-07-15 14:07:16.124324999 2012-12-28 14:22:09.877411421 2012-12-28 14:22:09.877411421 2013-07-15 14:07:16.124324999
-2013-04-10 00:43:46.854731546 2012-12-31 12:21:13.731274757 2013-07-18 12:06:19.978188335 2013-07-18 12:06:19.978188335 2012-12-31 12:21:13.731274757 2012-12-31 12:21:13.731274757 2013-07-18 12:06:19.978188335
-2021-09-24 03:18:32.413655165 2021-06-16 15:55:59.290198376 2022-01-01 13:41:05.537111954 2022-01-01 13:41:05.537111954 2021-06-16 15:55:59.290198376 2021-06-16 15:55:59.290198376 2022-01-01 13:41:05.537111954
-2024-11-11 16:42:41.101 2024-08-04 06:20:07.977543211 2025-02-19 04:05:14.224456789 2025-02-19 04:05:14.224456789 2024-08-04 06:20:07.977543211 2024-08-04 06:20:07.977543211 2025-02-19 04:05:14.224456789
+1966-08-16 13:36:50.183618031 1966-05-09 02:14:17.060161242 1966-11-24 00:59:23.30707482 1966-11-24 00:59:23.30707482 1966-05-09 02:14:17.060161242 1966-05-09 02:14:17.060161242 1966-11-24 00:59:23.30707482
+1973-04-17 06:30:38.596784156 1973-01-07 19:08:05.473327367 1973-07-25 17:53:11.720240945 1973-07-25 17:53:11.720240945 1973-01-07 19:08:05.473327367 1973-01-07 19:08:05.473327367 1973-07-25 17:53:11.720240945
+1974-10-04 17:21:03.989 1974-06-27 05:58:30.865543211 1975-01-12 04:43:37.112456789 1975-01-12 04:43:37.112456789 1974-06-27 05:58:30.865543211 1974-06-27 05:58:30.865543211 1975-01-12 04:43:37.112456789
+1976-03-03 04:54:33.000895162 1975-11-24 17:31:59.877438373 1976-06-10 16:17:06.124351951 1976-06-10 16:17:06.124351951 1975-11-24 17:31:59.877438373 1975-11-24 17:31:59.877438373 1976-06-10 16:17:06.124351951
+1976-05-06 00:42:30.910786948 1976-01-27 13:19:57.787330159 1976-08-13 12:05:04.034243737 1976-08-13 12:05:04.034243737 1976-01-27 13:19:57.787330159 1976-01-27 13:19:57.787330159 1976-08-13 12:05:04.034243737
+1978-08-05 14:41:05.501 1978-04-28 03:18:32.377543211 1978-11-13 02:03:38.624456789 1978-11-13 02:03:38.624456789 1978-04-28 03:18:32.377543211 1978-04-28 03:18:32.377543211 1978-11-13 02:03:38.624456789
+1981-04-25 09:01:12.077192689 1981-01-15 21:38:38.9537359 1981-08-02 20:23:45.200649478 1981-08-02 20:23:45.200649478 1981-01-15 21:38:38.9537359 1981-01-15 21:38:38.9537359 1981-08-02 20:23:45.200649478
+1981-11-15 23:03:10.999338387 1981-08-08 11:40:37.875881598 1982-02-23 10:25:44.122795176 1982-02-23 10:25:44.122795176 1981-08-08 11:40:37.875881598 1981-08-08 11:40:37.875881598 1982-02-23 10:25:44.122795176
+1985-07-20 09:30:11 1985-04-11 22:07:37.876543211 1985-10-27 20:52:44.123456789 1985-10-27 20:52:44.123456789 1985-04-11 22:07:37.876543211 1985-04-11 22:07:37.876543211 1985-10-27 20:52:44.123456789
+1985-11-18 16:37:54 1985-08-11 05:15:20.876543211 1986-02-26 04:00:27.123456789 1986-02-26 04:00:27.123456789 1985-08-11 05:15:20.876543211 1985-08-11 05:15:20.876543211 1986-02-26 04:00:27.123456789
+1987-02-21 19:48:29 1986-11-14 08:25:55.876543211 1987-06-01 07:11:02.123456789 1987-06-01 07:11:02.123456789 1986-11-14 08:25:55.876543211 1986-11-14 08:25:55.876543211 1987-06-01 07:11:02.123456789
+1987-05-28 13:52:07.900916635 1987-02-18 02:29:34.777459846 1987-09-05 01:14:41.024373424 1987-09-05 01:14:41.024373424 1987-02-18 02:29:34.777459846 1987-02-18 02:29:34.777459846 1987-09-05 01:14:41.024373424
+1998-10-16 20:05:29.397591987 1998-07-09 08:42:56.274135198 1999-01-24 07:28:02.521048776 1999-01-24 07:28:02.521048776 1998-07-09 08:42:56.274135198 1998-07-09 08:42:56.274135198 1999-01-24 07:28:02.521048776
+1999-10-03 16:59:10.396903939 1999-06-26 05:36:37.27344715 2000-01-11 04:21:43.520360728 2000-01-11 04:21:43.520360728 1999-06-26 05:36:37.27344715 1999-06-26 05:36:37.27344715 2000-01-11 04:21:43.520360728
+2000-12-18 08:42:30.000595596 2000-09-09 21:19:56.877138807 2001-03-27 20:05:03.124052385 2001-03-27 20:05:03.124052385 2000-09-09 21:19:56.877138807 2000-09-09 21:19:56.877138807 2001-03-27 20:05:03.124052385
+2002-05-10 05:29:48.990818073 2002-01-30 18:07:15.867361284 2002-08-17 16:52:22.114274862 2002-08-17 16:52:22.114274862 2002-01-30 18:07:15.867361284 2002-01-30 18:07:15.867361284 2002-08-17 16:52:22.114274862
+2003-09-23 22:33:17.00003252 2003-06-16 11:10:43.876575731 2004-01-01 09:55:50.123489309 2004-01-01 09:55:50.123489309 2003-06-16 11:10:43.876575731 2003-06-16 11:10:43.876575731 2004-01-01 09:55:50.123489309
+2004-03-07 20:14:13 2003-11-29 08:51:39.876543211 2004-06-15 07:36:46.123456789 2004-06-15 07:36:46.123456789 2003-11-29 08:51:39.876543211 2003-11-29 08:51:39.876543211 2004-06-15 07:36:46.123456789
+2007-02-09 05:17:29.368756876 2006-11-01 17:54:56.245300087 2007-05-19 16:40:02.492213665 2007-05-19 16:40:02.492213665 2006-11-01 17:54:56.245300087 2006-11-01 17:54:56.245300087 2007-05-19 16:40:02.492213665
+2009-01-21 10:49:07.108 2008-10-13 23:26:33.984543211 2009-04-30 22:11:40.231456789 2009-04-30 22:11:40.231456789 2008-10-13 23:26:33.984543211 2008-10-13 23:26:33.984543211 2009-04-30 22:11:40.231456789
+2010-04-08 02:43:35.861742727 2009-12-29 15:21:02.738285938 2010-07-16 14:06:08.985199516 2010-07-16 14:06:08.985199516 2009-12-29 15:21:02.738285938 2009-12-29 15:21:02.738285938 2010-07-16 14:06:08.985199516
+2013-04-07 02:44:43.00086821 2012-12-28 15:22:09.877411421 2013-07-15 14:07:16.124324999 2013-07-15 14:07:16.124324999 2012-12-28 15:22:09.877411421 2012-12-28 15:22:09.877411421 2013-07-15 14:07:16.124324999
+2013-04-10 00:43:46.854731546 2012-12-31 13:21:13.731274757 2013-07-18 12:06:19.978188335 2013-07-18 12:06:19.978188335 2012-12-31 13:21:13.731274757 2012-12-31 13:21:13.731274757 2013-07-18 12:06:19.978188335
+2021-09-24 03:18:32.413655165 2021-06-16 15:55:59.290198376 2022-01-01 14:41:05.537111954 2022-01-01 14:41:05.537111954 2021-06-16 15:55:59.290198376 2021-06-16 15:55:59.290198376 2022-01-01 14:41:05.537111954
+2024-11-11 16:42:41.101 2024-08-04 05:20:07.977543211 2025-02-19 04:05:14.224456789 2025-02-19 04:05:14.224456789 2024-08-04 05:20:07.977543211 2024-08-04 05:20:07.977543211 2025-02-19 04:05:14.224456789
4143-07-08 10:53:27.252802259 4143-03-30 23:30:54.12934547 4143-10-15 22:16:00.376259048 4143-10-15 22:16:00.376259048 4143-03-30 23:30:54.12934547 4143-03-30 23:30:54.12934547 4143-10-15 22:16:00.376259048
-4966-12-04 09:30:55.202 4966-08-26 23:08:22.078543211 4967-03-13 21:53:28.325456789 4967-03-13 21:53:28.325456789 4966-08-26 23:08:22.078543211 4966-08-26 23:08:22.078543211 4967-03-13 21:53:28.325456789
-5339-02-01 14:10:01.085678691 5338-10-25 03:47:27.962221902 5339-05-12 02:32:34.20913548 5339-05-12 02:32:34.20913548 5338-10-25 03:47:27.962221902 5338-10-25 03:47:27.962221902 5339-05-12 02:32:34.20913548
-5344-10-04 18:40:08.165 5344-06-27 07:17:35.041543211 5345-01-12 05:02:41.288456789 5345-01-12 05:02:41.288456789 5344-06-27 07:17:35.041543211 5344-06-27 07:17:35.041543211 5345-01-12 05:02:41.288456789
+4966-12-04 09:30:55.202 4966-08-26 22:08:22.078543211 4967-03-13 20:53:28.325456789 4967-03-13 20:53:28.325456789 4966-08-26 22:08:22.078543211 4966-08-26 22:08:22.078543211 4967-03-13 20:53:28.325456789
+5339-02-01 14:10:01.085678691 5338-10-25 02:47:27.962221902 5339-05-12 01:32:34.20913548 5339-05-12 01:32:34.20913548 5338-10-25 02:47:27.962221902 5338-10-25 02:47:27.962221902 5339-05-12 01:32:34.20913548
+5344-10-04 18:40:08.165 5344-06-27 07:17:35.041543211 5345-01-12 06:02:41.288456789 5345-01-12 06:02:41.288456789 5344-06-27 07:17:35.041543211 5344-06-27 07:17:35.041543211 5345-01-12 06:02:41.288456789
5397-07-13 07:12:32.000896438 5397-04-04 19:49:58.877439649 5397-10-20 18:35:05.124353227 5397-10-20 18:35:05.124353227 5397-04-04 19:49:58.877439649 5397-04-04 19:49:58.877439649 5397-10-20 18:35:05.124353227
5966-07-09 03:30:50.597 5966-03-31 16:08:17.473543211 5966-10-16 14:53:23.720456789 5966-10-16 14:53:23.720456789 5966-03-31 16:08:17.473543211 5966-03-31 16:08:17.473543211 5966-10-16 14:53:23.720456789
6229-06-28 02:54:28.970117179 6229-03-20 15:31:55.84666039 6229-10-05 14:17:02.093573968 6229-10-05 14:17:02.093573968 6229-03-20 15:31:55.84666039 6229-03-20 15:31:55.84666039 6229-10-05 14:17:02.093573968
-6482-04-27 12:07:38.073915413 6482-01-17 23:45:04.950458624 6482-08-04 23:30:11.197372202 6482-08-04 23:30:11.197372202 6482-01-17 23:45:04.950458624 6482-01-17 23:45:04.950458624 6482-08-04 23:30:11.197372202
-6631-11-13 16:31:29.702202248 6631-08-06 06:08:56.578745459 6632-02-21 03:54:02.825659037 6632-02-21 03:54:02.825659037 6631-08-06 06:08:56.578745459 6631-08-06 06:08:56.578745459 6632-02-21 03:54:02.825659037
-6705-09-28 18:27:28.000845672 6705-06-21 07:04:54.877388883 6706-01-06 04:50:01.124302461 6706-01-06 04:50:01.124302461 6705-06-21 07:04:54.877388883 6705-06-21 07:04:54.877388883 6706-01-06 04:50:01.124302461
-6731-02-12 08:12:48.287783702 6730-11-04 20:50:15.164326913 6731-05-22 20:35:21.411240491 6731-05-22 20:35:21.411240491 6730-11-04 20:50:15.164326913 6730-11-04 20:50:15.164326913 6731-05-22 20:35:21.411240491
-7160-12-02 06:00:24.81200852 7160-08-24 19:37:51.688551731 7161-03-11 17:22:57.935465309 7161-03-11 17:22:57.935465309 7160-08-24 19:37:51.688551731 7160-08-24 19:37:51.688551731 7161-03-11 17:22:57.935465309
-7409-09-07 23:33:32.459349602 7409-05-31 12:10:59.335892813 7409-12-16 09:56:05.582806391 7409-12-16 09:56:05.582806391 7409-05-31 12:10:59.335892813 7409-05-31 12:10:59.335892813 7409-12-16 09:56:05.582806391
+6482-04-27 12:07:38.073915413 6482-01-18 00:45:04.950458624 6482-08-04 23:30:11.197372202 6482-08-04 23:30:11.197372202 6482-01-18 00:45:04.950458624 6482-01-18 00:45:04.950458624 6482-08-04 23:30:11.197372202
+6631-11-13 16:31:29.702202248 6631-08-06 05:08:56.578745459 6632-02-21 03:54:02.825659037 6632-02-21 03:54:02.825659037 6631-08-06 05:08:56.578745459 6631-08-06 05:08:56.578745459 6632-02-21 03:54:02.825659037
+6705-09-28 18:27:28.000845672 6705-06-21 07:04:54.877388883 6706-01-06 05:50:01.124302461 6706-01-06 05:50:01.124302461 6705-06-21 07:04:54.877388883 6705-06-21 07:04:54.877388883 6706-01-06 05:50:01.124302461
+6731-02-12 08:12:48.287783702 6730-11-04 20:50:15.164326913 6731-05-22 19:35:21.411240491 6731-05-22 19:35:21.411240491 6730-11-04 20:50:15.164326913 6730-11-04 20:50:15.164326913 6731-05-22 19:35:21.411240491
+7160-12-02 06:00:24.81200852 7160-08-24 18:37:51.688551731 7161-03-11 17:22:57.935465309 7161-03-11 17:22:57.935465309 7160-08-24 18:37:51.688551731 7160-08-24 18:37:51.688551731 7161-03-11 17:22:57.935465309
+7409-09-07 23:33:32.459349602 7409-05-31 12:10:59.335892813 7409-12-16 10:56:05.582806391 7409-12-16 10:56:05.582806391 7409-05-31 12:10:59.335892813 7409-05-31 12:10:59.335892813 7409-12-16 10:56:05.582806391
7503-06-23 23:14:17.486 7503-03-16 11:51:44.362543211 7503-10-01 10:36:50.609456789 7503-10-01 10:36:50.609456789 7503-03-16 11:51:44.362543211 7503-03-16 11:51:44.362543211 7503-10-01 10:36:50.609456789
8422-07-22 03:21:45.745036084 8422-04-13 15:59:12.621579295 8422-10-29 14:44:18.868492873 8422-10-29 14:44:18.868492873 8422-04-13 15:59:12.621579295 8422-04-13 15:59:12.621579295 8422-10-29 14:44:18.868492873
-8521-01-16 20:42:05.668832388 8520-10-09 10:19:32.545375599 8521-04-26 09:04:38.792289177 8521-04-26 09:04:38.792289177 8520-10-09 10:19:32.545375599 8520-10-09 10:19:32.545375599 8521-04-26 09:04:38.792289177
-9075-06-13 16:20:09.218517797 9075-03-06 03:57:36.095061008 9075-09-21 03:42:42.341974586 9075-09-21 03:42:42.341974586 9075-03-06 03:57:36.095061008 9075-03-06 03:57:36.095061008 9075-09-21 03:42:42.341974586
-9209-11-11 04:08:58.223768453 9209-08-03 17:46:25.100311664 9210-02-18 15:31:31.347225242 9210-02-18 15:31:31.347225242 9209-08-03 17:46:25.100311664 9209-08-03 17:46:25.100311664 9210-02-18 15:31:31.347225242
-9403-01-09 18:12:33.547 9402-10-02 07:50:00.423543211 9403-04-19 06:35:06.670456789 9403-04-19 06:35:06.670456789 9402-10-02 07:50:00.423543211 9402-10-02 07:50:00.423543211 9403-04-19 06:35:06.670456789
+8521-01-16 20:42:05.668832388 8520-10-09 09:19:32.545375599 8521-04-26 08:04:38.792289177 8521-04-26 08:04:38.792289177 8520-10-09 09:19:32.545375599 8520-10-09 09:19:32.545375599 8521-04-26 08:04:38.792289177
+9075-06-13 16:20:09.218517797 9075-03-06 04:57:36.095061008 9075-09-21 03:42:42.341974586 9075-09-21 03:42:42.341974586 9075-03-06 04:57:36.095061008 9075-03-06 04:57:36.095061008 9075-09-21 03:42:42.341974586
+9209-11-11 04:08:58.223768453 9209-08-03 16:46:25.100311664 9210-02-18 15:31:31.347225242 9210-02-18 15:31:31.347225242 9209-08-03 16:46:25.100311664 9209-08-03 16:46:25.100311664 9210-02-18 15:31:31.347225242
+9403-01-09 18:12:33.547 9402-10-02 06:50:00.423543211 9403-04-19 05:35:06.670456789 9403-04-19 05:35:06.670456789 9402-10-02 06:50:00.423543211 9402-10-02 06:50:00.423543211 9403-04-19 05:35:06.670456789
PREHOOK: query: explain vectorization expression
select
interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
diff --git a/ql/src/test/results/clientpositive/llap_uncompressed.q.out b/ql/src/test/results/clientpositive/llap_uncompressed.q.out
index a11a30c612..fd3ed1dadf 100644
--- a/ql/src/test/results/clientpositive/llap_uncompressed.q.out
+++ b/ql/src/test/results/clientpositive/llap_uncompressed.q.out
@@ -137,7 +137,7 @@ POSTHOOK: query: select sum(hash(*)) from llap_temp_table
POSTHOOK: type: QUERY
POSTHOOK: Input: default@llap_temp_table
#### A masked pattern was here ####
--42787391908
+-107598224420
PREHOOK: query: explain
select * from orc_llap_n0 where cint > 10 and cint < 5000000
PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/localtimezone.q.out b/ql/src/test/results/clientpositive/localtimezone.q.out
index a76e4a3db4..b4c6d86247 100644
--- a/ql/src/test/results/clientpositive/localtimezone.q.out
+++ b/ql/src/test/results/clientpositive/localtimezone.q.out
@@ -19,24 +19,24 @@ POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@date_test
PREHOOK: query: insert into `date_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz')
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
PREHOOK: Output: default@date_test
POSTHOOK: query: insert into `date_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz')
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@date_test
@@ -50,24 +50,24 @@ POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@timestamp_test
PREHOOK: query: insert into `timestamp_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz')
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
PREHOOK: Output: default@timestamp_test
POSTHOOK: query: insert into `timestamp_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz')
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@timestamp_test
@@ -81,24 +81,24 @@ POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@timestamptz_test
PREHOOK: query: insert into `timestamptz_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz')
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
PREHOOK: Output: default@timestamptz_test
POSTHOOK: query: insert into `timestamptz_test` VALUES
- ('2011-01-01 01:01:01.123'),
- ('2011-01-01 01:01:01.123 Europe/Rome'),
- ('2011-01-01 01:01:01.123 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912'),
- ('2011-01-01 01:01:01.12345678912 Europe/Rome'),
- ('2011-01-01 01:01:01.12345678912 GMT-05:00'),
- ('2011-01-01 01:01:01.12345678912 xyz')
+ (cast('2011-01-01 01:01:01.123' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.123 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 Europe/Rome' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 GMT-05:00' as timestamp with local time zone)),
+ (cast('2011-01-01 01:01:01.12345678912 xyz' as timestamp with local time zone))
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@timestamptz_test
diff --git a/ql/src/test/results/clientpositive/localtimezone2.q.out b/ql/src/test/results/clientpositive/localtimezone2.q.out
new file mode 100644
index 0000000000..fb4bd17376
--- /dev/null
+++ b/ql/src/test/results/clientpositive/localtimezone2.q.out
@@ -0,0 +1,148 @@
+PREHOOK: query: drop table `table_tsltz`
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table `table_tsltz`
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE table_tsltz (tz VARCHAR(200),
+ c_ts1 TIMESTAMP,
+ c_ts2 TIMESTAMP,
+ c_tsltz1 TIMESTAMP WITH LOCAL TIME ZONE,
+ c_tsltz2 TIMESTAMP WITH LOCAL TIME ZONE)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table_tsltz
+POSTHOOK: query: CREATE TABLE table_tsltz (tz VARCHAR(200),
+ c_ts1 TIMESTAMP,
+ c_ts2 TIMESTAMP,
+ c_tsltz1 TIMESTAMP WITH LOCAL TIME ZONE,
+ c_tsltz2 TIMESTAMP WITH LOCAL TIME ZONE)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table_tsltz
+PREHOOK: query: insert into table_tsltz values (
+ '-08:00',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_tsltz
+POSTHOOK: query: insert into table_tsltz values (
+ '-08:00',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_tsltz
+POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
+PREHOOK: query: insert into table_tsltz values (
+ 'UTC',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_tsltz
+POSTHOOK: query: insert into table_tsltz values (
+ 'UTC',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_tsltz
+POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
+PREHOOK: query: insert into table_tsltz values (
+ '+02:00',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_tsltz
+POSTHOOK: query: insert into table_tsltz values (
+ '+02:00',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_tsltz
+POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
+PREHOOK: query: insert into table_tsltz values (
+ 'US/Pacific',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@table_tsltz
+POSTHOOK: query: insert into table_tsltz values (
+ 'US/Pacific',
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone),
+ cast('2016-01-01 00:00:00' as timestamp),
+ cast('2016-01-01 00:00:00 -05:00' as timestamp with local time zone))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@table_tsltz
+POSTHOOK: Lineage: table_tsltz.c_ts1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_ts2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz1 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.c_tsltz2 SCRIPT []
+POSTHOOK: Lineage: table_tsltz.tz SCRIPT []
+PREHOOK: query: select tz,
+ c_ts1, c_ts2,
+ cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
+from table_tsltz
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_tsltz
+#### A masked pattern was here ####
+POSTHOOK: query: select tz,
+ c_ts1, c_ts2,
+ cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
+from table_tsltz
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_tsltz
+#### A masked pattern was here ####
+-08:00 2016-01-01 00:00:00 2015-12-31 21:00:00 2016-01-01 00:00:00.0 US/Pacific 2015-12-31 21:00:00.0 US/Pacific
+UTC 2016-01-01 00:00:00 2016-01-01 05:00:00 2015-12-31 16:00:00.0 US/Pacific 2015-12-31 21:00:00.0 US/Pacific
++02:00 2016-01-01 00:00:00 2016-01-01 07:00:00 2015-12-31 14:00:00.0 US/Pacific 2015-12-31 21:00:00.0 US/Pacific
+US/Pacific 2016-01-01 00:00:00 2015-12-31 21:00:00 2016-01-01 00:00:00.0 US/Pacific 2015-12-31 21:00:00.0 US/Pacific
+PREHOOK: query: select tz,
+ c_ts1, c_ts2,
+ cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
+from table_tsltz
+PREHOOK: type: QUERY
+PREHOOK: Input: default@table_tsltz
+#### A masked pattern was here ####
+POSTHOOK: query: select tz,
+ c_ts1, c_ts2,
+ cast(c_tsltz1 as VARCHAR(200)) as c_tsltz1, cast(c_tsltz2 as VARCHAR(200)) as c_tsltz2
+from table_tsltz
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@table_tsltz
+#### A masked pattern was here ####
+-08:00 2016-01-01 00:00:00 2015-12-31 21:00:00 2016-01-01 08:00:00.0 UTC 2016-01-01 05:00:00.0 UTC
+UTC 2016-01-01 00:00:00 2016-01-01 05:00:00 2016-01-01 00:00:00.0 UTC 2016-01-01 05:00:00.0 UTC
++02:00 2016-01-01 00:00:00 2016-01-01 07:00:00 2015-12-31 22:00:00.0 UTC 2016-01-01 05:00:00.0 UTC
+US/Pacific 2016-01-01 00:00:00 2015-12-31 21:00:00 2016-01-01 08:00:00.0 UTC 2016-01-01 05:00:00.0 UTC
diff --git a/ql/src/test/results/clientpositive/orc_file_dump.q.out b/ql/src/test/results/clientpositive/orc_file_dump.q.out
index 2067145ce5..31c23214b3 100644
--- a/ql/src/test/results/clientpositive/orc_file_dump.q.out
+++ b/ql/src/test/results/clientpositive/orc_file_dump.q.out
@@ -102,34 +102,34 @@ Type: struct
+Compression size: 131072
+Type: struct
Stripe Statistics:
Stripe 1:
@@ -161,8 +161,8 @@ Stripe Statistics:
Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368
Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736
- Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 9: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 10: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 11: count: 9174 hasNull: true true: 6138
Column 12: count: 9173 hasNull: true true: 3983
Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -173,8 +173,8 @@ Stripe Statistics:
Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
- Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 21: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 22: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 23: count: 9174 hasNull: true true: 6138
Column 24: count: 9173 hasNull: true true: 3983
Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -185,8 +185,8 @@ Stripe Statistics:
Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
- Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 33: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 34: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 35: count: 9174 hasNull: true true: 6138
Column 36: count: 9173 hasNull: true true: 3983
Stripe 2:
@@ -199,8 +199,8 @@ Stripe Statistics:
Column 6: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
Column 7: count: 12288 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 442368
Column 8: count: 12288 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 884736
- Column 9: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 10: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 9: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 10: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 11: count: 9174 hasNull: true true: 6138
Column 12: count: 9173 hasNull: true true: 3983
Column 13: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -211,8 +211,8 @@ Stripe Statistics:
Column 18: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
Column 19: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
Column 20: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
- Column 21: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 22: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 21: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 22: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 23: count: 9174 hasNull: true true: 6138
Column 24: count: 9173 hasNull: true true: 3983
Column 25: count: 9173 hasNull: true min: -64 max: 62 sum: -39856
@@ -223,8 +223,8 @@ Stripe Statistics:
Column 30: count: 9174 hasNull: true min: -16379.0 max: 9763215.5639 sum: 5.62236530305E7
Column 31: count: 9174 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 127881
Column 32: count: 9173 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 149134
- Column 33: count: 9173 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 34: count: 9174 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 33: count: 9173 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 34: count: 9174 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 35: count: 9174 hasNull: true true: 6138
Column 36: count: 9173 hasNull: true true: 3983
@@ -238,8 +238,8 @@ File Statistics:
Column 6: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
Column 7: count: 24576 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 884736
Column 8: count: 24576 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 1769472
- Column 9: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 10: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 9: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 10: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 11: count: 18348 hasNull: true true: 12276
Column 12: count: 18346 hasNull: true true: 7966
Column 13: count: 18346 hasNull: true min: -64 max: 62 sum: -79712
@@ -250,8 +250,8 @@ File Statistics:
Column 18: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
Column 19: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762
Column 20: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268
- Column 21: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 22: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 21: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 22: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 23: count: 18348 hasNull: true true: 12276
Column 24: count: 18346 hasNull: true true: 7966
Column 25: count: 18346 hasNull: true min: -64 max: 62 sum: -79712
@@ -262,136 +262,136 @@ File Statistics:
Column 30: count: 18348 hasNull: true min: -16379.0 max: 9763215.5639 sum: 1.12447306061E8
Column 31: count: 18348 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yy2GiGM sum: 255762
Column 32: count: 18346 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 298268
- Column 33: count: 18346 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
- Column 34: count: 18348 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808
+ Column 33: count: 18346 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
+ Column 34: count: 18348 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808
Column 35: count: 18348 hasNull: true true: 12276
Column 36: count: 18346 hasNull: true true: 7966
Stripes:
- Stripe: offset: 3 data: 1500017 rows: 12288 tail: 501 index: 2836
+ Stripe: offset: 3 data: 1498336 rows: 12288 tail: 493 index: 2821
Stream: column 0 section ROW_INDEX start: 3 length 21
Stream: column 1 section ROW_INDEX start: 24 length 53
Stream: column 2 section ROW_INDEX start: 77 length 67
- Stream: column 3 section ROW_INDEX start: 144 length 81
- Stream: column 4 section ROW_INDEX start: 225 length 83
- Stream: column 5 section ROW_INDEX start: 308 length 77
- Stream: column 6 section ROW_INDEX start: 385 length 77
- Stream: column 7 section ROW_INDEX start: 462 length 176
- Stream: column 8 section ROW_INDEX start: 638 length 267
- Stream: column 9 section ROW_INDEX start: 905 length 63
- Stream: column 10 section ROW_INDEX start: 968 length 57
- Stream: column 11 section ROW_INDEX start: 1025 length 47
- Stream: column 12 section ROW_INDEX start: 1072 length 47
- Stream: column 13 section ROW_INDEX start: 1119 length 53
- Stream: column 14 section ROW_INDEX start: 1172 length 67
- Stream: column 15 section ROW_INDEX start: 1239 length 81
- Stream: column 16 section ROW_INDEX start: 1320 length 83
- Stream: column 17 section ROW_INDEX start: 1403 length 77
- Stream: column 18 section ROW_INDEX start: 1480 length 77
- Stream: column 19 section ROW_INDEX start: 1557 length 115
- Stream: column 20 section ROW_INDEX start: 1672 length 93
- Stream: column 21 section ROW_INDEX start: 1765 length 63
- Stream: column 22 section ROW_INDEX start: 1828 length 57
- Stream: column 23 section ROW_INDEX start: 1885 length 47
- Stream: column 24 section ROW_INDEX start: 1932 length 47
- Stream: column 25 section ROW_INDEX start: 1979 length 53
- Stream: column 26 section ROW_INDEX start: 2032 length 67
- Stream: column 27 section ROW_INDEX start: 2099 length 81
- Stream: column 28 section ROW_INDEX start: 2180 length 83
- Stream: column 29 section ROW_INDEX start: 2263 length 77
- Stream: column 30 section ROW_INDEX start: 2340 length 77
- Stream: column 31 section ROW_INDEX start: 2417 length 115
- Stream: column 32 section ROW_INDEX start: 2532 length 93
- Stream: column 33 section ROW_INDEX start: 2625 length 63
- Stream: column 34 section ROW_INDEX start: 2688 length 57
- Stream: column 35 section ROW_INDEX start: 2745 length 47
- Stream: column 36 section ROW_INDEX start: 2792 length 47
- Stream: column 1 section PRESENT start: 2839 length 51
- Stream: column 1 section DATA start: 2890 length 5448
- Stream: column 2 section PRESENT start: 8338 length 53
- Stream: column 2 section DATA start: 8391 length 12144
- Stream: column 3 section PRESENT start: 20535 length 53
- Stream: column 3 section DATA start: 20588 length 24618
- Stream: column 4 section PRESENT start: 45206 length 52
- Stream: column 4 section DATA start: 45258 length 24681
- Stream: column 5 section PRESENT start: 69939 length 51
- Stream: column 5 section DATA start: 69990 length 9927
- Stream: column 6 section PRESENT start: 79917 length 53
- Stream: column 6 section DATA start: 79970 length 19755
- Stream: column 7 section DATA start: 99725 length 258570
- Stream: column 7 section LENGTH start: 358295 length 108
- Stream: column 8 section DATA start: 358403 length 517341
- Stream: column 8 section LENGTH start: 875744 length 108
- Stream: column 9 section PRESENT start: 875852 length 52
- Stream: column 9 section DATA start: 875904 length 8045
- Stream: column 9 section SECONDARY start: 883949 length 9555
- Stream: column 10 section PRESENT start: 893504 length 58
- Stream: column 10 section DATA start: 893562 length 8082
- Stream: column 10 section SECONDARY start: 901644 length 9590
- Stream: column 11 section PRESENT start: 911234 length 51
- Stream: column 11 section DATA start: 911285 length 782
- Stream: column 12 section PRESENT start: 912067 length 54
- Stream: column 12 section DATA start: 912121 length 783
- Stream: column 13 section PRESENT start: 912904 length 51
- Stream: column 13 section DATA start: 912955 length 5448
- Stream: column 14 section PRESENT start: 918403 length 53
- Stream: column 14 section DATA start: 918456 length 12144
- Stream: column 15 section PRESENT start: 930600 length 53
- Stream: column 15 section DATA start: 930653 length 24618
- Stream: column 16 section PRESENT start: 955271 length 52
- Stream: column 16 section DATA start: 955323 length 24681
- Stream: column 17 section PRESENT start: 980004 length 51
- Stream: column 17 section DATA start: 980055 length 9927
- Stream: column 18 section PRESENT start: 989982 length 53
- Stream: column 18 section DATA start: 990035 length 19755
- Stream: column 19 section PRESENT start: 1009790 length 51
- Stream: column 19 section DATA start: 1009841 length 11009
- Stream: column 19 section LENGTH start: 1020850 length 3722
- Stream: column 19 section DICTIONARY_DATA start: 1024572 length 65435
- Stream: column 20 section PRESENT start: 1090007 length 54
- Stream: column 20 section DATA start: 1090061 length 11006
- Stream: column 20 section LENGTH start: 1101067 length 3739
- Stream: column 20 section DICTIONARY_DATA start: 1104806 length 66022
- Stream: column 21 section PRESENT start: 1170828 length 52
- Stream: column 21 section DATA start: 1170880 length 8045
- Stream: column 21 section SECONDARY start: 1178925 length 9555
- Stream: column 22 section PRESENT start: 1188480 length 58
- Stream: column 22 section DATA start: 1188538 length 8082
- Stream: column 22 section SECONDARY start: 1196620 length 9590
- Stream: column 23 section PRESENT start: 1206210 length 51
- Stream: column 23 section DATA start: 1206261 length 782
- Stream: column 24 section PRESENT start: 1207043 length 54
- Stream: column 24 section DATA start: 1207097 length 783
- Stream: column 25 section PRESENT start: 1207880 length 51
- Stream: column 25 section DATA start: 1207931 length 5448
- Stream: column 26 section PRESENT start: 1213379 length 53
- Stream: column 26 section DATA start: 1213432 length 12144
- Stream: column 27 section PRESENT start: 1225576 length 53
- Stream: column 27 section DATA start: 1225629 length 24618
- Stream: column 28 section PRESENT start: 1250247 length 52
- Stream: column 28 section DATA start: 1250299 length 24681
- Stream: column 29 section PRESENT start: 1274980 length 51
- Stream: column 29 section DATA start: 1275031 length 9927
- Stream: column 30 section PRESENT start: 1284958 length 53
- Stream: column 30 section DATA start: 1285011 length 19755
- Stream: column 31 section PRESENT start: 1304766 length 51
- Stream: column 31 section DATA start: 1304817 length 11009
- Stream: column 31 section LENGTH start: 1315826 length 3722
- Stream: column 31 section DICTIONARY_DATA start: 1319548 length 65435
- Stream: column 32 section PRESENT start: 1384983 length 54
- Stream: column 32 section DATA start: 1385037 length 11006
- Stream: column 32 section LENGTH start: 1396043 length 3739
- Stream: column 32 section DICTIONARY_DATA start: 1399782 length 66022
- Stream: column 33 section PRESENT start: 1465804 length 52
- Stream: column 33 section DATA start: 1465856 length 8045
- Stream: column 33 section SECONDARY start: 1473901 length 9555
- Stream: column 34 section PRESENT start: 1483456 length 58
- Stream: column 34 section DATA start: 1483514 length 8082
- Stream: column 34 section SECONDARY start: 1491596 length 9590
- Stream: column 35 section PRESENT start: 1501186 length 51
- Stream: column 35 section DATA start: 1501237 length 782
- Stream: column 36 section PRESENT start: 1502019 length 54
- Stream: column 36 section DATA start: 1502073 length 783
+ Stream: column 3 section ROW_INDEX start: 144 length 79
+ Stream: column 4 section ROW_INDEX start: 223 length 83
+ Stream: column 5 section ROW_INDEX start: 306 length 77
+ Stream: column 6 section ROW_INDEX start: 383 length 77
+ Stream: column 7 section ROW_INDEX start: 460 length 170
+ Stream: column 8 section ROW_INDEX start: 630 length 264
+ Stream: column 9 section ROW_INDEX start: 894 length 63
+ Stream: column 10 section ROW_INDEX start: 957 length 57
+ Stream: column 11 section ROW_INDEX start: 1014 length 47
+ Stream: column 12 section ROW_INDEX start: 1061 length 47
+ Stream: column 13 section ROW_INDEX start: 1108 length 53
+ Stream: column 14 section ROW_INDEX start: 1161 length 67
+ Stream: column 15 section ROW_INDEX start: 1228 length 79
+ Stream: column 16 section ROW_INDEX start: 1307 length 83
+ Stream: column 17 section ROW_INDEX start: 1390 length 77
+ Stream: column 18 section ROW_INDEX start: 1467 length 77
+ Stream: column 19 section ROW_INDEX start: 1544 length 115
+ Stream: column 20 section ROW_INDEX start: 1659 length 93
+ Stream: column 21 section ROW_INDEX start: 1752 length 63
+ Stream: column 22 section ROW_INDEX start: 1815 length 57
+ Stream: column 23 section ROW_INDEX start: 1872 length 47
+ Stream: column 24 section ROW_INDEX start: 1919 length 47
+ Stream: column 25 section ROW_INDEX start: 1966 length 53
+ Stream: column 26 section ROW_INDEX start: 2019 length 67
+ Stream: column 27 section ROW_INDEX start: 2086 length 79
+ Stream: column 28 section ROW_INDEX start: 2165 length 83
+ Stream: column 29 section ROW_INDEX start: 2248 length 77
+ Stream: column 30 section ROW_INDEX start: 2325 length 77
+ Stream: column 31 section ROW_INDEX start: 2402 length 115
+ Stream: column 32 section ROW_INDEX start: 2517 length 93
+ Stream: column 33 section ROW_INDEX start: 2610 length 63
+ Stream: column 34 section ROW_INDEX start: 2673 length 57
+ Stream: column 35 section ROW_INDEX start: 2730 length 47
+ Stream: column 36 section ROW_INDEX start: 2777 length 47
+ Stream: column 1 section PRESENT start: 2824 length 51
+ Stream: column 1 section DATA start: 2875 length 5448
+ Stream: column 2 section PRESENT start: 8323 length 53
+ Stream: column 2 section DATA start: 8376 length 12078
+ Stream: column 3 section PRESENT start: 20454 length 53
+ Stream: column 3 section DATA start: 20507 length 24479
+ Stream: column 4 section PRESENT start: 44986 length 52
+ Stream: column 4 section DATA start: 45038 length 24479
+ Stream: column 5 section PRESENT start: 69517 length 51
+ Stream: column 5 section DATA start: 69568 length 9927
+ Stream: column 6 section PRESENT start: 79495 length 53
+ Stream: column 6 section DATA start: 79548 length 19755
+ Stream: column 7 section DATA start: 99303 length 259558
+ Stream: column 7 section LENGTH start: 358861 length 12
+ Stream: column 8 section DATA start: 358873 length 518777
+ Stream: column 8 section LENGTH start: 877650 length 12
+ Stream: column 9 section PRESENT start: 877662 length 52
+ Stream: column 9 section DATA start: 877714 length 7769
+ Stream: column 9 section SECONDARY start: 885483 length 9448
+ Stream: column 10 section PRESENT start: 894931 length 58
+ Stream: column 10 section DATA start: 894989 length 7778
+ Stream: column 10 section SECONDARY start: 902767 length 9469
+ Stream: column 11 section PRESENT start: 912236 length 51
+ Stream: column 11 section DATA start: 912287 length 782
+ Stream: column 12 section PRESENT start: 913069 length 54
+ Stream: column 12 section DATA start: 913123 length 783
+ Stream: column 13 section PRESENT start: 913906 length 51
+ Stream: column 13 section DATA start: 913957 length 5448
+ Stream: column 14 section PRESENT start: 919405 length 53
+ Stream: column 14 section DATA start: 919458 length 12078
+ Stream: column 15 section PRESENT start: 931536 length 53
+ Stream: column 15 section DATA start: 931589 length 24479
+ Stream: column 16 section PRESENT start: 956068 length 52
+ Stream: column 16 section DATA start: 956120 length 24479
+ Stream: column 17 section PRESENT start: 980599 length 51
+ Stream: column 17 section DATA start: 980650 length 9927
+ Stream: column 18 section PRESENT start: 990577 length 53
+ Stream: column 18 section DATA start: 990630 length 19755
+ Stream: column 19 section PRESENT start: 1010385 length 51
+ Stream: column 19 section DATA start: 1010436 length 10942
+ Stream: column 19 section LENGTH start: 1021378 length 3722
+ Stream: column 19 section DICTIONARY_DATA start: 1025100 length 65435
+ Stream: column 20 section PRESENT start: 1090535 length 54
+ Stream: column 20 section DATA start: 1090589 length 10939
+ Stream: column 20 section LENGTH start: 1101528 length 3739
+ Stream: column 20 section DICTIONARY_DATA start: 1105267 length 66022
+ Stream: column 21 section PRESENT start: 1171289 length 52
+ Stream: column 21 section DATA start: 1171341 length 7769
+ Stream: column 21 section SECONDARY start: 1179110 length 9448
+ Stream: column 22 section PRESENT start: 1188558 length 58
+ Stream: column 22 section DATA start: 1188616 length 7778
+ Stream: column 22 section SECONDARY start: 1196394 length 9469
+ Stream: column 23 section PRESENT start: 1205863 length 51
+ Stream: column 23 section DATA start: 1205914 length 782
+ Stream: column 24 section PRESENT start: 1206696 length 54
+ Stream: column 24 section DATA start: 1206750 length 783
+ Stream: column 25 section PRESENT start: 1207533 length 51
+ Stream: column 25 section DATA start: 1207584 length 5448
+ Stream: column 26 section PRESENT start: 1213032 length 53
+ Stream: column 26 section DATA start: 1213085 length 12078
+ Stream: column 27 section PRESENT start: 1225163 length 53
+ Stream: column 27 section DATA start: 1225216 length 24479
+ Stream: column 28 section PRESENT start: 1249695 length 52
+ Stream: column 28 section DATA start: 1249747 length 24479
+ Stream: column 29 section PRESENT start: 1274226 length 51
+ Stream: column 29 section DATA start: 1274277 length 9927
+ Stream: column 30 section PRESENT start: 1284204 length 53
+ Stream: column 30 section DATA start: 1284257 length 19755
+ Stream: column 31 section PRESENT start: 1304012 length 51
+ Stream: column 31 section DATA start: 1304063 length 10942
+ Stream: column 31 section LENGTH start: 1315005 length 3722
+ Stream: column 31 section DICTIONARY_DATA start: 1318727 length 65435
+ Stream: column 32 section PRESENT start: 1384162 length 54
+ Stream: column 32 section DATA start: 1384216 length 10939
+ Stream: column 32 section LENGTH start: 1395155 length 3739
+ Stream: column 32 section DICTIONARY_DATA start: 1398894 length 66022
+ Stream: column 33 section PRESENT start: 1464916 length 52
+ Stream: column 33 section DATA start: 1464968 length 7769
+ Stream: column 33 section SECONDARY start: 1472737 length 9448
+ Stream: column 34 section PRESENT start: 1482185 length 58
+ Stream: column 34 section DATA start: 1482243 length 7778
+ Stream: column 34 section SECONDARY start: 1490021 length 9469
+ Stream: column 35 section PRESENT start: 1499490 length 51
+ Stream: column 35 section DATA start: 1499541 length 782
+ Stream: column 36 section PRESENT start: 1500323 length 54
+ Stream: column 36 section DATA start: 1500377 length 783
Encoding column 0: DIRECT
Encoding column 1: DIRECT
Encoding column 2: DIRECT_V2
@@ -437,13 +437,13 @@ Stripes:
Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
Row group indices for column 2:
Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
+ Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
Row group indices for column 3:
Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
+ Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
Row group indices for column 4:
Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
+ Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
Row group indices for column 5:
Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -452,16 +452,16 @@ Stripes:
Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
Row group indices for column 7:
Entry 0: count: 10000 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 360000 positions: 0,0,0,0,0
- Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153190,97856,0,9766,272
+ Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153708,97856,0,76,272
Row group indices for column 8:
Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0
- Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272
+ Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 384237,64640,0,76,272
Row group indices for column 9:
- Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
+ Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
Row group indices for column 10:
- Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
+ Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
Row group indices for column 11:
Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -473,13 +473,13 @@ Stripes:
Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
Row group indices for column 14:
Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
+ Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
Row group indices for column 15:
Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
+ Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
Row group indices for column 16:
Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
+ Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
Row group indices for column 17:
Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -488,16 +488,16 @@ Stripes:
Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
Row group indices for column 19:
Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
+ Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
Row group indices for column 20:
Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
+ Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
Row group indices for column 21:
- Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
+ Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
Row group indices for column 22:
- Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
+ Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
Row group indices for column 23:
Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -509,13 +509,13 @@ Stripes:
Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
Row group indices for column 26:
Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
+ Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
Row group indices for column 27:
Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
+ Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
Row group indices for column 28:
Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
+ Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
Row group indices for column 29:
Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -524,146 +524,146 @@ Stripes:
Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
Row group indices for column 31:
Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
+ Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
Row group indices for column 32:
Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
+ Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
Row group indices for column 33:
- Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
+ Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
Row group indices for column 34:
- Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
+ Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
Row group indices for column 35:
Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
Row group indices for column 36:
Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
- Stripe: offset: 1503357 data: 1500017 rows: 12288 tail: 501 index: 2836
- Stream: column 0 section ROW_INDEX start: 1503357 length 21
- Stream: column 1 section ROW_INDEX start: 1503378 length 53
- Stream: column 2 section ROW_INDEX start: 1503431 length 67
- Stream: column 3 section ROW_INDEX start: 1503498 length 81
- Stream: column 4 section ROW_INDEX start: 1503579 length 83
- Stream: column 5 section ROW_INDEX start: 1503662 length 77
- Stream: column 6 section ROW_INDEX start: 1503739 length 77
- Stream: column 7 section ROW_INDEX start: 1503816 length 176
- Stream: column 8 section ROW_INDEX start: 1503992 length 267
- Stream: column 9 section ROW_INDEX start: 1504259 length 63
- Stream: column 10 section ROW_INDEX start: 1504322 length 57
- Stream: column 11 section ROW_INDEX start: 1504379 length 47
- Stream: column 12 section ROW_INDEX start: 1504426 length 47
- Stream: column 13 section ROW_INDEX start: 1504473 length 53
- Stream: column 14 section ROW_INDEX start: 1504526 length 67
- Stream: column 15 section ROW_INDEX start: 1504593 length 81
- Stream: column 16 section ROW_INDEX start: 1504674 length 83
- Stream: column 17 section ROW_INDEX start: 1504757 length 77
- Stream: column 18 section ROW_INDEX start: 1504834 length 77
- Stream: column 19 section ROW_INDEX start: 1504911 length 115
- Stream: column 20 section ROW_INDEX start: 1505026 length 93
- Stream: column 21 section ROW_INDEX start: 1505119 length 63
- Stream: column 22 section ROW_INDEX start: 1505182 length 57
- Stream: column 23 section ROW_INDEX start: 1505239 length 47
- Stream: column 24 section ROW_INDEX start: 1505286 length 47
- Stream: column 25 section ROW_INDEX start: 1505333 length 53
- Stream: column 26 section ROW_INDEX start: 1505386 length 67
- Stream: column 27 section ROW_INDEX start: 1505453 length 81
- Stream: column 28 section ROW_INDEX start: 1505534 length 83
- Stream: column 29 section ROW_INDEX start: 1505617 length 77
- Stream: column 30 section ROW_INDEX start: 1505694 length 77
- Stream: column 31 section ROW_INDEX start: 1505771 length 115
- Stream: column 32 section ROW_INDEX start: 1505886 length 93
- Stream: column 33 section ROW_INDEX start: 1505979 length 63
- Stream: column 34 section ROW_INDEX start: 1506042 length 57
- Stream: column 35 section ROW_INDEX start: 1506099 length 47
- Stream: column 36 section ROW_INDEX start: 1506146 length 47
- Stream: column 1 section PRESENT start: 1506193 length 51
- Stream: column 1 section DATA start: 1506244 length 5448
- Stream: column 2 section PRESENT start: 1511692 length 53
- Stream: column 2 section DATA start: 1511745 length 12144
- Stream: column 3 section PRESENT start: 1523889 length 53
- Stream: column 3 section DATA start: 1523942 length 24618
- Stream: column 4 section PRESENT start: 1548560 length 52
- Stream: column 4 section DATA start: 1548612 length 24681
- Stream: column 5 section PRESENT start: 1573293 length 51
- Stream: column 5 section DATA start: 1573344 length 9927
- Stream: column 6 section PRESENT start: 1583271 length 53
- Stream: column 6 section DATA start: 1583324 length 19755
- Stream: column 7 section DATA start: 1603079 length 258570
- Stream: column 7 section LENGTH start: 1861649 length 108
- Stream: column 8 section DATA start: 1861757 length 517341
- Stream: column 8 section LENGTH start: 2379098 length 108
- Stream: column 9 section PRESENT start: 2379206 length 52
- Stream: column 9 section DATA start: 2379258 length 8045
- Stream: column 9 section SECONDARY start: 2387303 length 9555
- Stream: column 10 section PRESENT start: 2396858 length 58
- Stream: column 10 section DATA start: 2396916 length 8082
- Stream: column 10 section SECONDARY start: 2404998 length 9590
- Stream: column 11 section PRESENT start: 2414588 length 51
- Stream: column 11 section DATA start: 2414639 length 782
- Stream: column 12 section PRESENT start: 2415421 length 54
- Stream: column 12 section DATA start: 2415475 length 783
- Stream: column 13 section PRESENT start: 2416258 length 51
- Stream: column 13 section DATA start: 2416309 length 5448
- Stream: column 14 section PRESENT start: 2421757 length 53
- Stream: column 14 section DATA start: 2421810 length 12144
- Stream: column 15 section PRESENT start: 2433954 length 53
- Stream: column 15 section DATA start: 2434007 length 24618
- Stream: column 16 section PRESENT start: 2458625 length 52
- Stream: column 16 section DATA start: 2458677 length 24681
- Stream: column 17 section PRESENT start: 2483358 length 51
- Stream: column 17 section DATA start: 2483409 length 9927
- Stream: column 18 section PRESENT start: 2493336 length 53
- Stream: column 18 section DATA start: 2493389 length 19755
- Stream: column 19 section PRESENT start: 2513144 length 51
- Stream: column 19 section DATA start: 2513195 length 11009
- Stream: column 19 section LENGTH start: 2524204 length 3722
- Stream: column 19 section DICTIONARY_DATA start: 2527926 length 65435
- Stream: column 20 section PRESENT start: 2593361 length 54
- Stream: column 20 section DATA start: 2593415 length 11006
- Stream: column 20 section LENGTH start: 2604421 length 3739
- Stream: column 20 section DICTIONARY_DATA start: 2608160 length 66022
- Stream: column 21 section PRESENT start: 2674182 length 52
- Stream: column 21 section DATA start: 2674234 length 8045
- Stream: column 21 section SECONDARY start: 2682279 length 9555
- Stream: column 22 section PRESENT start: 2691834 length 58
- Stream: column 22 section DATA start: 2691892 length 8082
- Stream: column 22 section SECONDARY start: 2699974 length 9590
- Stream: column 23 section PRESENT start: 2709564 length 51
- Stream: column 23 section DATA start: 2709615 length 782
- Stream: column 24 section PRESENT start: 2710397 length 54
- Stream: column 24 section DATA start: 2710451 length 783
- Stream: column 25 section PRESENT start: 2711234 length 51
- Stream: column 25 section DATA start: 2711285 length 5448
- Stream: column 26 section PRESENT start: 2716733 length 53
- Stream: column 26 section DATA start: 2716786 length 12144
- Stream: column 27 section PRESENT start: 2728930 length 53
- Stream: column 27 section DATA start: 2728983 length 24618
- Stream: column 28 section PRESENT start: 2753601 length 52
- Stream: column 28 section DATA start: 2753653 length 24681
- Stream: column 29 section PRESENT start: 2778334 length 51
- Stream: column 29 section DATA start: 2778385 length 9927
- Stream: column 30 section PRESENT start: 2788312 length 53
- Stream: column 30 section DATA start: 2788365 length 19755
- Stream: column 31 section PRESENT start: 2808120 length 51
- Stream: column 31 section DATA start: 2808171 length 11009
- Stream: column 31 section LENGTH start: 2819180 length 3722
- Stream: column 31 section DICTIONARY_DATA start: 2822902 length 65435
- Stream: column 32 section PRESENT start: 2888337 length 54
- Stream: column 32 section DATA start: 2888391 length 11006
- Stream: column 32 section LENGTH start: 2899397 length 3739
- Stream: column 32 section DICTIONARY_DATA start: 2903136 length 66022
- Stream: column 33 section PRESENT start: 2969158 length 52
- Stream: column 33 section DATA start: 2969210 length 8045
- Stream: column 33 section SECONDARY start: 2977255 length 9555
- Stream: column 34 section PRESENT start: 2986810 length 58
- Stream: column 34 section DATA start: 2986868 length 8082
- Stream: column 34 section SECONDARY start: 2994950 length 9590
- Stream: column 35 section PRESENT start: 3004540 length 51
- Stream: column 35 section DATA start: 3004591 length 782
- Stream: column 36 section PRESENT start: 3005373 length 54
- Stream: column 36 section DATA start: 3005427 length 783
+ Stripe: offset: 1501653 data: 1498336 rows: 12288 tail: 493 index: 2821
+ Stream: column 0 section ROW_INDEX start: 1501653 length 21
+ Stream: column 1 section ROW_INDEX start: 1501674 length 53
+ Stream: column 2 section ROW_INDEX start: 1501727 length 67
+ Stream: column 3 section ROW_INDEX start: 1501794 length 79
+ Stream: column 4 section ROW_INDEX start: 1501873 length 83
+ Stream: column 5 section ROW_INDEX start: 1501956 length 77
+ Stream: column 6 section ROW_INDEX start: 1502033 length 77
+ Stream: column 7 section ROW_INDEX start: 1502110 length 170
+ Stream: column 8 section ROW_INDEX start: 1502280 length 264
+ Stream: column 9 section ROW_INDEX start: 1502544 length 63
+ Stream: column 10 section ROW_INDEX start: 1502607 length 57
+ Stream: column 11 section ROW_INDEX start: 1502664 length 47
+ Stream: column 12 section ROW_INDEX start: 1502711 length 47
+ Stream: column 13 section ROW_INDEX start: 1502758 length 53
+ Stream: column 14 section ROW_INDEX start: 1502811 length 67
+ Stream: column 15 section ROW_INDEX start: 1502878 length 79
+ Stream: column 16 section ROW_INDEX start: 1502957 length 83
+ Stream: column 17 section ROW_INDEX start: 1503040 length 77
+ Stream: column 18 section ROW_INDEX start: 1503117 length 77
+ Stream: column 19 section ROW_INDEX start: 1503194 length 115
+ Stream: column 20 section ROW_INDEX start: 1503309 length 93
+ Stream: column 21 section ROW_INDEX start: 1503402 length 63
+ Stream: column 22 section ROW_INDEX start: 1503465 length 57
+ Stream: column 23 section ROW_INDEX start: 1503522 length 47
+ Stream: column 24 section ROW_INDEX start: 1503569 length 47
+ Stream: column 25 section ROW_INDEX start: 1503616 length 53
+ Stream: column 26 section ROW_INDEX start: 1503669 length 67
+ Stream: column 27 section ROW_INDEX start: 1503736 length 79
+ Stream: column 28 section ROW_INDEX start: 1503815 length 83
+ Stream: column 29 section ROW_INDEX start: 1503898 length 77
+ Stream: column 30 section ROW_INDEX start: 1503975 length 77
+ Stream: column 31 section ROW_INDEX start: 1504052 length 115
+ Stream: column 32 section ROW_INDEX start: 1504167 length 93
+ Stream: column 33 section ROW_INDEX start: 1504260 length 63
+ Stream: column 34 section ROW_INDEX start: 1504323 length 57
+ Stream: column 35 section ROW_INDEX start: 1504380 length 47
+ Stream: column 36 section ROW_INDEX start: 1504427 length 47
+ Stream: column 1 section PRESENT start: 1504474 length 51
+ Stream: column 1 section DATA start: 1504525 length 5448
+ Stream: column 2 section PRESENT start: 1509973 length 53
+ Stream: column 2 section DATA start: 1510026 length 12078
+ Stream: column 3 section PRESENT start: 1522104 length 53
+ Stream: column 3 section DATA start: 1522157 length 24479
+ Stream: column 4 section PRESENT start: 1546636 length 52
+ Stream: column 4 section DATA start: 1546688 length 24479
+ Stream: column 5 section PRESENT start: 1571167 length 51
+ Stream: column 5 section DATA start: 1571218 length 9927
+ Stream: column 6 section PRESENT start: 1581145 length 53
+ Stream: column 6 section DATA start: 1581198 length 19755
+ Stream: column 7 section DATA start: 1600953 length 259558
+ Stream: column 7 section LENGTH start: 1860511 length 12
+ Stream: column 8 section DATA start: 1860523 length 518777
+ Stream: column 8 section LENGTH start: 2379300 length 12
+ Stream: column 9 section PRESENT start: 2379312 length 52
+ Stream: column 9 section DATA start: 2379364 length 7769
+ Stream: column 9 section SECONDARY start: 2387133 length 9448
+ Stream: column 10 section PRESENT start: 2396581 length 58
+ Stream: column 10 section DATA start: 2396639 length 7778
+ Stream: column 10 section SECONDARY start: 2404417 length 9469
+ Stream: column 11 section PRESENT start: 2413886 length 51
+ Stream: column 11 section DATA start: 2413937 length 782
+ Stream: column 12 section PRESENT start: 2414719 length 54
+ Stream: column 12 section DATA start: 2414773 length 783
+ Stream: column 13 section PRESENT start: 2415556 length 51
+ Stream: column 13 section DATA start: 2415607 length 5448
+ Stream: column 14 section PRESENT start: 2421055 length 53
+ Stream: column 14 section DATA start: 2421108 length 12078
+ Stream: column 15 section PRESENT start: 2433186 length 53
+ Stream: column 15 section DATA start: 2433239 length 24479
+ Stream: column 16 section PRESENT start: 2457718 length 52
+ Stream: column 16 section DATA start: 2457770 length 24479
+ Stream: column 17 section PRESENT start: 2482249 length 51
+ Stream: column 17 section DATA start: 2482300 length 9927
+ Stream: column 18 section PRESENT start: 2492227 length 53
+ Stream: column 18 section DATA start: 2492280 length 19755
+ Stream: column 19 section PRESENT start: 2512035 length 51
+ Stream: column 19 section DATA start: 2512086 length 10942
+ Stream: column 19 section LENGTH start: 2523028 length 3722
+ Stream: column 19 section DICTIONARY_DATA start: 2526750 length 65435
+ Stream: column 20 section PRESENT start: 2592185 length 54
+ Stream: column 20 section DATA start: 2592239 length 10939
+ Stream: column 20 section LENGTH start: 2603178 length 3739
+ Stream: column 20 section DICTIONARY_DATA start: 2606917 length 66022
+ Stream: column 21 section PRESENT start: 2672939 length 52
+ Stream: column 21 section DATA start: 2672991 length 7769
+ Stream: column 21 section SECONDARY start: 2680760 length 9448
+ Stream: column 22 section PRESENT start: 2690208 length 58
+ Stream: column 22 section DATA start: 2690266 length 7778
+ Stream: column 22 section SECONDARY start: 2698044 length 9469
+ Stream: column 23 section PRESENT start: 2707513 length 51
+ Stream: column 23 section DATA start: 2707564 length 782
+ Stream: column 24 section PRESENT start: 2708346 length 54
+ Stream: column 24 section DATA start: 2708400 length 783
+ Stream: column 25 section PRESENT start: 2709183 length 51
+ Stream: column 25 section DATA start: 2709234 length 5448
+ Stream: column 26 section PRESENT start: 2714682 length 53
+ Stream: column 26 section DATA start: 2714735 length 12078
+ Stream: column 27 section PRESENT start: 2726813 length 53
+ Stream: column 27 section DATA start: 2726866 length 24479
+ Stream: column 28 section PRESENT start: 2751345 length 52
+ Stream: column 28 section DATA start: 2751397 length 24479
+ Stream: column 29 section PRESENT start: 2775876 length 51
+ Stream: column 29 section DATA start: 2775927 length 9927
+ Stream: column 30 section PRESENT start: 2785854 length 53
+ Stream: column 30 section DATA start: 2785907 length 19755
+ Stream: column 31 section PRESENT start: 2805662 length 51
+ Stream: column 31 section DATA start: 2805713 length 10942
+ Stream: column 31 section LENGTH start: 2816655 length 3722
+ Stream: column 31 section DICTIONARY_DATA start: 2820377 length 65435
+ Stream: column 32 section PRESENT start: 2885812 length 54
+ Stream: column 32 section DATA start: 2885866 length 10939
+ Stream: column 32 section LENGTH start: 2896805 length 3739
+ Stream: column 32 section DICTIONARY_DATA start: 2900544 length 66022
+ Stream: column 33 section PRESENT start: 2966566 length 52
+ Stream: column 33 section DATA start: 2966618 length 7769
+ Stream: column 33 section SECONDARY start: 2974387 length 9448
+ Stream: column 34 section PRESENT start: 2983835 length 58
+ Stream: column 34 section DATA start: 2983893 length 7778
+ Stream: column 34 section SECONDARY start: 2991671 length 9469
+ Stream: column 35 section PRESENT start: 3001140 length 51
+ Stream: column 35 section DATA start: 3001191 length 782
+ Stream: column 36 section PRESENT start: 3001973 length 54
+ Stream: column 36 section DATA start: 3002027 length 783
Encoding column 0: DIRECT
Encoding column 1: DIRECT
Encoding column 2: DIRECT_V2
@@ -709,13 +709,13 @@ Stripes:
Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
Row group indices for column 2:
Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
+ Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
Row group indices for column 3:
Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
+ Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
Row group indices for column 4:
Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
+ Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
Row group indices for column 5:
Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -724,16 +724,16 @@ Stripes:
Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
Row group indices for column 7:
Entry 0: count: 10000 hasNull: false min: 00020767-dd8f-4f4d-bd68-4b7be64b8e44 max: fffa3516-e219-4027-b0d3-72bb2e676c52 sum: 360000 positions: 0,0,0,0,0
- Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153190,97856,0,9766,272
+ Entry 1: count: 2288 hasNull: false min: 002d8ccb-a094-4d10-b283-999770cf8488 max: ffacef94-41da-4230-807a-509bbf50b057 sum: 82368 positions: 153708,97856,0,76,272
Row group indices for column 8:
Entry 0: count: 10000 hasNull: false min: 000976f7-7075-4f3f-a564-5a375fafcc101416a2b7-7f64-41b7-851f-97d15405037e max: fffd0642-5f01-48cd-8d97-3428faee49e9b39f2b4c-efdc-4e5f-9ab5-4aa5394cb156 sum: 720000 positions: 0,0,0,0,0
- Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 306445,195712,0,9766,272
+ Entry 1: count: 2288 hasNull: false min: 00124556-8383-44c4-a28b-7a413de74ccc4137606f-2cf7-43fb-beff-b6d374fd15ec max: ffde3bce-bb56-4fa9-81d7-146ca2eab946225c18e0-0002-4d07-9853-12c92c0f5637 sum: 164736 positions: 384237,64640,0,76,272
Row group indices for column 9:
- Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
+ Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
Row group indices for column 10:
- Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
+ Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
Row group indices for column 11:
Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -745,13 +745,13 @@ Stripes:
Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
Row group indices for column 14:
Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
+ Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
Row group indices for column 15:
Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
+ Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
Row group indices for column 16:
Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
+ Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
Row group indices for column 17:
Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -760,16 +760,16 @@ Stripes:
Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
Row group indices for column 19:
Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
+ Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
Row group indices for column 20:
Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
+ Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
Row group indices for column 21:
- Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
+ Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
Row group indices for column 22:
- Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
+ Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
Row group indices for column 23:
Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -781,13 +781,13 @@ Stripes:
Entry 1: count: 1264 hasNull: true min: -64 max: 62 sum: 10347 positions: 0,182,99,0,0,5937,2
Row group indices for column 26:
Entry 0: count: 7924 hasNull: true min: -16379 max: 16376 sum: 9298530 positions: 0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,15334,272
+ Entry 1: count: 1250 hasNull: true min: -16309 max: 16331 sum: -1862540 positions: 0,126,96,0,0,10231,272
Row group indices for column 27:
Entry 0: count: 7139 hasNull: true min: -1073051226 max: 1073680599 sum: 1417841516466 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,28584,0
+ Entry 1: count: 2034 hasNull: true min: -1073279343 max: 1072872630 sum: 20209347319 positions: 0,128,98,0,0,16332,0
Row group indices for column 28:
Entry 0: count: 6889 hasNull: true min: -2147311592 max: 2144325818 sum: -24788202148 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,26534,262
+ Entry 1: count: 2284 hasNull: true min: -2144905793 max: 2145498388 sum: -1673671826261 positions: 0,168,7,0,0,18366,262
Row group indices for column 29:
Entry 0: count: 7909 hasNull: true min: -64.0 max: 79.5530014038086 sum: -49823.35599219799 positions: 0,0,0,0,0,0
Entry 1: count: 1264 hasNull: true min: -64.0 max: 62.0 sum: 10343.719999313354 positions: 0,182,99,0,0,31636
@@ -796,16 +796,16 @@ Stripes:
Entry 1: count: 1250 hasNull: true min: -16309.0 max: 9763215.5639 sum: 7897951.792899999 positions: 0,126,96,0,0,63392
Row group indices for column 31:
Entry 0: count: 7140 hasNull: true min: 0042l0d5rPD6sMlJ7Ue0q max: yxN0212hM17E8J8bJj8D7b sum: 99028 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,14308,0
+ Entry 1: count: 2034 hasNull: true min: 006bb3K max: yy2GiGM sum: 28853 positions: 0,126,98,0,0,8182,0
Row group indices for column 32:
Entry 0: count: 6889 hasNull: true min: 0034fkcXMQI3 max: yyt0S8WorA sum: 109415 positions: 0,0,0,0,0,0,0
- Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,13280,262
+ Entry 1: count: 2284 hasNull: true min: 004J8y max: yjDBo sum: 39719 positions: 0,168,8,0,0,9196,262
Row group indices for column 33:
- Entry 0: count: 7909 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1264 hasNull: true min: 1969-12-31 13:59:43.64 max: 1969-12-31 14:00:30.808 positions: 0,182,100,0,0,30619,258,0,15332,258
+ Entry 0: count: 7909 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1264 hasNull: true min: 1969-12-31 15:59:43.64 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:43.64 max UTC: 1969-12-31 08:00:30.808 positions: 0,182,100,0,0,22588,218,0,11248,258
Row group indices for column 34:
- Entry 0: count: 7924 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
- Entry 1: count: 1250 hasNull: true min: 1969-12-31 13:59:30.929 max: 1969-12-31 14:00:30.808 positions: 0,126,97,0,0,30619,273,0,15334,272
+ Entry 0: count: 7924 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,0,0,0,0,0,0,0,0,0
+ Entry 1: count: 1250 hasNull: true min: 1969-12-31 15:59:30.929 max: 1969-12-31 16:00:30.808 min UTC: 1969-12-31 07:59:30.929 max UTC: 1969-12-31 08:00:30.808 positions: 0,126,97,0,0,20399,273,0,10229,272
Row group indices for column 35:
Entry 0: count: 7140 hasNull: true true: 5115 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2034 hasNull: true true: 1023 positions: 0,126,98,0,0,520,126,4
@@ -813,7 +813,7 @@ Stripes:
Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
-File length: 3007982 bytes
+File length: 3004630 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/orc_merge5.q.out b/ql/src/test/results/clientpositive/orc_merge5.q.out
index 0e87ce6dba..af99fc8dc9 100644
--- a/ql/src/test/results/clientpositive/orc_merge5.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge5.q.out
@@ -38,17 +38,17 @@ STAGE PLANS:
TableScan
alias: orc_merge5_n5
filterExpr: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -57,7 +57,7 @@ STAGE PLANS:
Select Operator
expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp)
outputColumnNames: userid, string1, subtype, decimal1, ts
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
mode: hash
@@ -118,9 +118,9 @@ PREHOOK: Input: default@orc_merge5b_n0
PREHOOK: Output: default@orc_merge5b_n0
POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b_n0
POSTHOOK: Output: default@orc_merge5b_n0
-Found 3 items
+Found 1 items
#### A masked pattern was here ####
PREHOOK: query: select * from orc_merge5b_n0
PREHOOK: type: QUERY
@@ -154,17 +154,17 @@ STAGE PLANS:
TableScan
alias: orc_merge5_n5
filterExpr: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -173,7 +173,7 @@ STAGE PLANS:
Select Operator
expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp)
outputColumnNames: userid, string1, subtype, decimal1, ts
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
mode: hash
@@ -297,9 +297,9 @@ PREHOOK: Input: default@orc_merge5b_n0
PREHOOK: Output: default@orc_merge5b_n0
POSTHOOK: query: analyze table orc_merge5b_n0 compute statistics noscan
POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b_n0
POSTHOOK: Output: default@orc_merge5b_n0
-Found 3 items
+Found 1 items
#### A masked pattern was here ####
PREHOOK: query: select * from orc_merge5b_n0
PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/orc_merge6.q.out b/ql/src/test/results/clientpositive/orc_merge6.q.out
index 39813b76f5..7613c401d0 100644
--- a/ql/src/test/results/clientpositive/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge6.q.out
@@ -38,17 +38,17 @@ STAGE PLANS:
TableScan
alias: orc_merge5_n4
filterExpr: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -57,18 +57,18 @@ STAGE PLANS:
Select Operator
expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int)
outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
keys: year (type: string), hour (type: int)
mode: hash
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: int)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct)
Reduce Operator Tree:
Group By Operator
@@ -76,14 +76,14 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: int)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -153,12 +153,12 @@ PREHOOK: Output: default@orc_merge5a_n1
PREHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
POSTHOOK: query: analyze table orc_merge5a_n1 partition(year="2001",hour=24) compute statistics noscan
POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 3 items
+Found 1 items
#### A masked pattern was here ####
-Found 3 items
+Found 1 items
#### A masked pattern was here ####
PREHOOK: query: show partitions orc_merge5a_n1
PREHOOK: type: SHOWPARTITIONS
@@ -207,17 +207,17 @@ STAGE PLANS:
TableScan
alias: orc_merge5_n4
filterExpr: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -226,18 +226,18 @@ STAGE PLANS:
Select Operator
expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), '2000' (type: string), UDFToInteger('24') (type: int)
outputColumnNames: userid, string1, subtype, decimal1, ts, year, hour
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
keys: year (type: string), hour (type: int)
mode: hash
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: int)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: int)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct)
Reduce Operator Tree:
Group By Operator
@@ -245,14 +245,14 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: int)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col6 (type: struct), _col0 (type: string), _col1 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -427,12 +427,12 @@ PREHOOK: Output: default@orc_merge5a_n1
PREHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
POSTHOOK: query: analyze table orc_merge5a_n1 partition(year="2001",hour=24) compute statistics noscan
POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
-Found 3 items
+Found 1 items
#### A masked pattern was here ####
-Found 3 items
+Found 1 items
#### A masked pattern was here ####
PREHOOK: query: show partitions orc_merge5a_n1
PREHOOK: type: SHOWPARTITIONS
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
index 5a1b00b4d1..56fc7e9088 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat1.q.out
@@ -36,18 +36,18 @@ STAGE PLANS:
Map Reduce
Map Operator Tree:
TableScan
alias: orc_merge5_n3
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (userid <= 13L) (type: boolean)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -56,7 +56,7 @@ STAGE PLANS:
Select Operator
expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp)
outputColumnNames: userid, string1, subtype, decimal1, ts
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
mode: hash
diff --git a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
index 0b76bfbb66..f6870f589a 100644
--- a/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge_incompat2.q.out
@@ -37,14 +37,14 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: orc_merge5
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: userid (type: bigint), string1 (type: string), subtype (type: double), decimal1 (type: decimal(10,0)), ts (type: timestamp), subtype (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -53,18 +53,18 @@ STAGE PLANS:
Select Operator
expressions: _col0 (type: bigint), _col1 (type: string), _col2 (type: double), _col3 (type: decimal(10,0)), _col4 (type: timestamp), _col5 (type: double)
outputColumnNames: userid, string1, subtype, decimal1, ts, st
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: compute_stats(userid, 'hll'), compute_stats(string1, 'hll'), compute_stats(subtype, 'hll'), compute_stats(decimal1, 'hll'), compute_stats(ts, 'hll')
keys: st (type: double)
mode: hash
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: double)
sort order: +
Map-reduce partition columns: _col0 (type: double)
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct)
Reduce Operator Tree:
Group By Operator
@@ -72,14 +72,14 @@ STAGE PLANS:
keys: KEY._col0 (type: double)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: struct), _col2 (type: struct), _col3 (type: struct), _col4 (type: struct), _col5 (type: struct), _col0 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 2464020 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 22980 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/orc_ppd_char.q.out b/ql/src/test/results/clientpositive/orc_ppd_char.q.out
index 846de53ff4..16ae483cba 100644
--- a/ql/src/test/results/clientpositive/orc_ppd_char.q.out
+++ b/ql/src/test/results/clientpositive/orc_ppd_char.q.out
@@ -26,8 +26,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -35,8 +35,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -44,8 +44,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-334427804500
+336445133500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -53,8 +53,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c!="apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-334427804500
+336445133500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -62,8 +62,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -71,8 +71,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<"hello"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -80,8 +80,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-81475875500
+85510533500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -89,8 +89,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c<="hello"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-81475875500
+85510533500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -98,8 +98,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -107,8 +107,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c="apple "
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "carrot")
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -116,8 +116,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "c
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "carrot")
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -125,8 +125,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "c
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "hello")
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -134,8 +134,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "h
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-81475875500
+85510533500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "hello")
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -143,8 +143,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("apple", "h
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-81475875500
+85510533500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c in ("carrot")
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -170,8 +170,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "carrot"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -179,8 +179,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
--252951929000
+-250934600000
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "zombie"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -188,8 +188,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-81475875500
+85510533500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple" and "zombie"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
@@ -197,8 +197,8 @@ POSTHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "apple"
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
-81475875500
+85510533500
PREHOOK: query: select sum(hash(*)) from newtypesorc_n4 where c between "carrot" and "carrot1"
PREHOOK: type: QUERY
PREHOOK: Input: default@newtypesorc_n4
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
index 6dd6e3f2dd..f1a137ce5f 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -87,8 +87,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
- predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+ predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+ predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -246,8 +246,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -280,8 +280,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -355,8 +355,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -388,8 +388,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -417,8 +417,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
- predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+ predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+ predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
Statistics: Num rows: 2730 Data size: 32760 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -563,8 +563,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -597,8 +597,8 @@ FROM alltypesparquet
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
index e33e70108c..357d83809d 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -74,8 +74,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
- predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+ predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+ predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -155,11 +155,11 @@ PREHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -183,11 +183,11 @@ POSTHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -237,11 +237,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -264,11 +264,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -294,8 +294,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
- predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+ predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+ predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
Statistics: Num rows: 5461 Data size: 65532 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -369,11 +369,11 @@ PREHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -397,11 +397,11 @@ POSTHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesparquet
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
index 485bfe7506..5a8c069b98 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_decimal_date.q.out
@@ -89,13 +89,13 @@ POSTHOOK: query: SELECT cdate, cdecimal from date_decimal_test_parquet where cin
POSTHOOK: type: QUERY
POSTHOOK: Input: default@date_decimal_test_parquet
#### A masked pattern was here ####
-1970-01-06 -7959.5837837838
-1970-01-06 -2516.4135135135
-1970-01-06 -9445.0621621622
-1970-01-06 -5713.7459459459
-1970-01-06 8963.6405405405
-1970-01-06 4193.6243243243
-1970-01-06 2964.3864864865
-1970-01-06 -4673.2540540541
-1970-01-06 -9216.8945945946
-1970-01-06 -9287.3756756757
+1970-01-07 -7959.5837837838
+1970-01-07 -2516.4135135135
+1970-01-07 -9445.0621621622
+1970-01-07 -5713.7459459459
+1970-01-07 8963.6405405405
+1970-01-07 4193.6243243243
+1970-01-07 2964.3864864865
+1970-01-07 -4673.2540540541
+1970-01-07 -9216.8945945946
+1970-01-07 -9287.3756756757
diff --git a/ql/src/test/results/clientpositive/timestamp_dst.q.out b/ql/src/test/results/clientpositive/timestamp_dst.q.out
new file mode 100644
index 0000000000..72a5ebf63e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/timestamp_dst.q.out
@@ -0,0 +1,9 @@
+PREHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101'
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select TIMESTAMP '2015-03-08 02:10:00.101'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2015-03-08 02:10:00.101
diff --git a/ql/src/test/results/clientpositive/timestamp_udf.q.out b/ql/src/test/results/clientpositive/timestamp_udf.q.out
index 452f366d41..201e4da796 100644
--- a/ql/src/test/results/clientpositive/timestamp_udf.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_udf.q.out
@@ -54,7 +54,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth
POSTHOOK: type: QUERY
POSTHOOK: Input: default@timestamp_udf_n0
#### A masked pattern was here ####
-1304690889 2011 5 6 6 18 7 8 9 2011-05-06
+1304665689 2011 5 6 6 18 7 8 9 2011-05-06
PREHOOK: query: select date_add(t, 5), date_sub(t, 10)
from timestamp_udf_n0
PREHOOK: type: QUERY
@@ -155,7 +155,7 @@ POSTHOOK: query: select unix_timestamp(t), year(t), month(t), day(t), dayofmonth
POSTHOOK: type: QUERY
POSTHOOK: Input: default@timestamp_udf_string
#### A masked pattern was here ####
-1304690889 2011 5 6 6 18 7 8 9 2011-05-06
+1304665689 2011 5 6 6 18 7 8 9 2011-05-06
PREHOOK: query: select date_add(t, 5), date_sub(t, 10) from timestamp_udf_string
PREHOOK: type: QUERY
PREHOOK: Input: default@timestamp_udf_string
diff --git a/ql/src/test/results/clientpositive/udf5.q.out b/ql/src/test/results/clientpositive/udf5.q.out
index 573eba5073..04327e856f 100644
--- a/ql/src/test/results/clientpositive/udf5.q.out
+++ b/ql/src/test/results/clientpositive/udf5.q.out
@@ -33,7 +33,7 @@ STAGE PLANS:
alias: dest1_n14
Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: '2008-11-11 15:32:20' (type: string), DATE'2008-11-11' (type: date), 1 (type: int), 11 (type: int), 2008 (type: int), 1 (type: int), 11 (type: int), 2008 (type: int)
+ expressions: '2008-11-11 23:32:20' (type: string), DATE'2008-11-11' (type: date), 1 (type: int), 11 (type: int), 2008 (type: int), 1 (type: int), 11 (type: int), 2008 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 1 Data size: 183 Basic stats: COMPLETE Column stats: COMPLETE
ListSink
@@ -46,7 +46,7 @@ POSTHOOK: query: SELECT from_unixtime(1226446340), to_date(from_unixtime(1226446
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1_n14
#### A masked pattern was here ####
-2008-11-11 15:32:20 2008-11-11 1 11 2008 1 11 2008
+2008-11-11 23:32:20 2008-11-11 1 11 2008 1 11 2008
PREHOOK: query: EXPLAIN
SELECT from_unixtime(unix_timestamp('2010-01-13 11:57:40', 'yyyy-MM-dd HH:mm:ss'), 'MM/dd/yy HH:mm:ss'), from_unixtime(unix_timestamp('2010-01-13 11:57:40')) from dest1_n14
PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/udf_mask.q.out b/ql/src/test/results/clientpositive/udf_mask.q.out
index e5e234453e..ca201fb530 100644
--- a/ql/src/test/results/clientpositive/udf_mask.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask.q.out
@@ -84,4 +84,4 @@ POSTHOOK: query: select mask('TestString-123', 'X', 'x', '0', ':'),
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-XxxxXxxxxx:000 XxxxXxxxxx:000 XxxxXxxxxx:000:::::::::: 43 -9981 55555 55555 1900-01-01 1900-01-20 1900-04-01 2016-01-01 2016-04-01 2016-01-20 1900-04-20 2016-04-20
+XxxxXxxxxx:000 XxxxXxxxxx:000 XxxxXxxxxx:000:::::::::: 43 -9981 55555 55555 0001-01-01 0001-01-20 0001-04-01 2016-01-01 2016-04-01 2016-01-20 0001-04-20 2016-04-20
diff --git a/ql/src/test/results/clientpositive/udf_mask_first_n.q.out b/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
index 823aca7e94..2d79ebe2b6 100644
--- a/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_first_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_first_n('TestString-123', 4, 'X', 'x', '0', ':'),
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-XxxxString-123 XxxxString-123 XxxxString-123 43 -9981 55555 55555 1900-01-01
+XxxxString-123 XxxxString-123 XxxxString-123 43 -9981 55555 55555 0001-01-01
diff --git a/ql/src/test/results/clientpositive/udf_mask_last_n.q.out b/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
index 57e74e3f83..73e94980d4 100644
--- a/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_last_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_last_n('TestString-123', 4, 'X', 'x', '0', ':'),
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-TestString:000 TestString:000 TestString-123 :::: 43 15555 15555 15555 1900-01-01
+TestString:000 TestString:000 TestString-123 :::: 43 15555 15555 15555 0001-01-01
diff --git a/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out b/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
index 7d92d63cb7..5e7a382b7c 100644
--- a/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_show_first_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_show_first_n('TestString-123', 4, 'X', 'x', '0', ':
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-TestXxxxxx:000 TestXxxxxx:000 TestXxxxxx:000:::::::::: 123 12345 12345 12345 1900-01-01
+TestXxxxxx:000 TestXxxxxx:000 TestXxxxxx:000:::::::::: 123 12345 12345 12345 0001-01-01
diff --git a/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out b/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
index 2f91394819..c293fa4a4b 100644
--- a/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
+++ b/ql/src/test/results/clientpositive/udf_mask_show_last_n.q.out
@@ -67,4 +67,4 @@ POSTHOOK: query: select mask_show_last_n('TestString-123', 4, 'X', 'x', '0', ':'
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-XxxxXxxxxx-123 XxxxXxxxxx-123 XxxxXxxxxx:000:::::: 123 -13191 52345 52345 1900-01-01
+XxxxXxxxxx-123 XxxxXxxxxx-123 XxxxXxxxxx:000:::::: 123 -13191 52345 52345 0001-01-01
diff --git a/ql/src/test/results/clientpositive/udf_reflect2.q.out b/ql/src/test/results/clientpositive/udf_reflect2.q.out
index 4834cd633c..af9015ad63 100644
--- a/ql/src/test/results/clientpositive/udf_reflect2.q.out
+++ b/ql/src/test/results/clientpositive/udf_reflect2.q.out
@@ -43,7 +43,7 @@ SELECT key,
reflect2(ts, "getHours"),
reflect2(ts, "getMinutes"),
reflect2(ts, "getSeconds"),
- reflect2(ts, "getTime")
+ reflect2(ts, "toEpochMilli")
FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN EXTENDED
@@ -77,7 +77,7 @@ SELECT key,
reflect2(ts, "getHours"),
reflect2(ts, "getMinutes"),
reflect2(ts, "getSeconds"),
- reflect2(ts, "getTime")
+ reflect2(ts, "toEpochMilli")
FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
POSTHOOK: type: QUERY
STAGE DEPENDENCIES:
@@ -93,7 +93,7 @@ STAGE PLANS:
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Select Operator
- expressions: UDFToInteger(key) (type: int), reflect2(UDFToInteger(key),'byteValue') (type: tinyint), reflect2(UDFToInteger(key),'shortValue') (type: smallint), reflect2(UDFToInteger(key),'intValue') (type: int), reflect2(UDFToInteger(key),'longValue') (type: bigint), reflect2(UDFToInteger(key),'floatValue') (type: float), reflect2(UDFToInteger(key),'doubleValue') (type: double), reflect2(UDFToInteger(key),'toString') (type: string), value (type: string), reflect2(value,'concat','_concat') (type: string), reflect2(value,'contains','86') (type: boolean), reflect2(value,'startsWith','v') (type: boolean), reflect2(value,'endsWith','6') (type: boolean), reflect2(value,'equals','val_86') (type: boolean), reflect2(value,'equalsIgnoreCase','VAL_86') (type: boolean), reflect2(value,'getBytes') (type: binary), reflect2(value,'indexOf','1') (type: int), reflect2(value,'lastIndexOf','1') (type: int), reflect2(value,'replace','val','VALUE') (type: string), reflect2(value,'substring',1) (type: string), reflect2(value,'substring',1,5) (type: string), reflect2(value,'toUpperCase') (type: string), reflect2(value,'trim') (type: string), TIMESTAMP'2013-02-15 19:41:20.0' (type: timestamp), 113 (type: int), 1 (type: int), 5 (type: int), 19 (type: int), 41 (type: int), 20 (type: int), 1360986080000L (type: bigint)
+ expressions: UDFToInteger(key) (type: int), reflect2(UDFToInteger(key),'byteValue') (type: tinyint), reflect2(UDFToInteger(key),'shortValue') (type: smallint), reflect2(UDFToInteger(key),'intValue') (type: int), reflect2(UDFToInteger(key),'longValue') (type: bigint), reflect2(UDFToInteger(key),'floatValue') (type: float), reflect2(UDFToInteger(key),'doubleValue') (type: double), reflect2(UDFToInteger(key),'toString') (type: string), value (type: string), reflect2(value,'concat','_concat') (type: string), reflect2(value,'contains','86') (type: boolean), reflect2(value,'startsWith','v') (type: boolean), reflect2(value,'endsWith','6') (type: boolean), reflect2(value,'equals','val_86') (type: boolean), reflect2(value,'equalsIgnoreCase','VAL_86') (type: boolean), reflect2(value,'getBytes') (type: binary), reflect2(value,'indexOf','1') (type: int), reflect2(value,'lastIndexOf','1') (type: int), reflect2(value,'replace','val','VALUE') (type: string), reflect2(value,'substring',1) (type: string), reflect2(value,'substring',1,5) (type: string), reflect2(value,'toUpperCase') (type: string), reflect2(value,'trim') (type: string), TIMESTAMP'2013-02-15 19:41:20' (type: timestamp), 2013 (type: int), 2 (type: int), 15 (type: int), 19 (type: int), 41 (type: int), 20 (type: int), 1360957280000L (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Limit
@@ -131,7 +131,7 @@ PREHOOK: query: SELECT key,
reflect2(ts, "getHours"),
reflect2(ts, "getMinutes"),
reflect2(ts, "getSeconds"),
- reflect2(ts, "getTime")
+ reflect2(ts, "toEpochMilli")
FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
PREHOOK: type: QUERY
PREHOOK: Input: default@src
@@ -166,13 +166,13 @@ POSTHOOK: query: SELECT key,
reflect2(ts, "getHours"),
reflect2(ts, "getMinutes"),
reflect2(ts, "getSeconds"),
- reflect2(ts, "getTime")
+ reflect2(ts, "toEpochMilli")
FROM (select cast(key as int) key, value, cast('2013-02-15 19:41:20' as timestamp) ts from src) a LIMIT 5
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
#### A masked pattern was here ####
-238 -18 238 238 238 238.0 238.0 238 val_238 val_238_concat false true false false false val_238 -1 -1 VALUE_238 al_238 al_2 VAL_238 val_238 2013-02-15 19:41:20 113 1 5 19 41 20 1360986080000
-86 86 86 86 86 86.0 86.0 86 val_86 val_86_concat true true true true true val_86 -1 -1 VALUE_86 al_86 al_8 VAL_86 val_86 2013-02-15 19:41:20 113 1 5 19 41 20 1360986080000
-311 55 311 311 311 311.0 311.0 311 val_311 val_311_concat false true false false false val_311 5 6 VALUE_311 al_311 al_3 VAL_311 val_311 2013-02-15 19:41:20 113 1 5 19 41 20 1360986080000
-27 27 27 27 27 27.0 27.0 27 val_27 val_27_concat false true false false false val_27 -1 -1 VALUE_27 al_27 al_2 VAL_27 val_27 2013-02-15 19:41:20 113 1 5 19 41 20 1360986080000
-165 -91 165 165 165 165.0 165.0 165 val_165 val_165_concat false true false false false val_165 4 4 VALUE_165 al_165 al_1 VAL_165 val_165 2013-02-15 19:41:20 113 1 5 19 41 20 1360986080000
+238 -18 238 238 238 238.0 238.0 238 val_238 val_238_concat false true false false false val_238 -1 -1 VALUE_238 al_238 al_2 VAL_238 val_238 2013-02-15 19:41:20 2013 2 15 19 41 20 1360957280000
+86 86 86 86 86 86.0 86.0 86 val_86 val_86_concat true true true true true val_86 -1 -1 VALUE_86 al_86 al_8 VAL_86 val_86 2013-02-15 19:41:20 2013 2 15 19 41 20 1360957280000
+311 55 311 311 311 311.0 311.0 311 val_311 val_311_concat false true false false false val_311 5 6 VALUE_311 al_311 al_3 VAL_311 val_311 2013-02-15 19:41:20 2013 2 15 19 41 20 1360957280000
+27 27 27 27 27 27.0 27.0 27 val_27 val_27_concat false true false false false val_27 -1 -1 VALUE_27 al_27 al_2 VAL_27 val_27 2013-02-15 19:41:20 2013 2 15 19 41 20 1360957280000
+165 -91 165 165 165 165.0 165.0 165 val_165 val_165_concat false true false false false val_165 4 4 VALUE_165 al_165 al_1 VAL_165 val_165 2013-02-15 19:41:20 2013 2 15 19 41 20 1360957280000
diff --git a/ql/src/test/results/clientpositive/udf_trunc.q.out b/ql/src/test/results/clientpositive/udf_trunc.q.out
index 62ac552afd..d31c67197e 100644
--- a/ql/src/test/results/clientpositive/udf_trunc.q.out
+++ b/ql/src/test/results/clientpositive/udf_trunc.q.out
@@ -887,9 +887,9 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: '2014-02-01' (type: string), null (type: string), null (type: string), null (type: string), null (type: string), null (type: string)
+ expressions: null (type: string), null (type: string), null (type: string), null (type: string), null (type: string), null (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 514 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 504 Basic stats: COMPLETE Column stats: COMPLETE
ListSink
PREHOOK: query: SELECT
@@ -912,7 +912,7 @@ POSTHOOK: query: SELECT
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-2014-02-01 NULL NULL NULL NULL NULL
+NULL NULL NULL NULL NULL NULL
PREHOOK: query: EXPLAIN
SELECT
TRUNC('2014-01-34', 'Q'),
@@ -944,9 +944,9 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: '2014-01-01' (type: string), null (type: string), null (type: string), null (type: string), '2014-01-01' (type: string), null (type: string)
+ expressions: null (type: string), null (type: string), null (type: string), null (type: string), '2014-01-01' (type: string), null (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 524 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 514 Basic stats: COMPLETE Column stats: COMPLETE
ListSink
PREHOOK: query: SELECT
@@ -969,7 +969,7 @@ POSTHOOK: query: SELECT
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-2014-01-01 NULL NULL NULL 2014-01-01 NULL
+NULL NULL NULL NULL 2014-01-01 NULL
PREHOOK: query: EXPLAIN
SELECT
TRUNC('2014-01-34', 'YEAR'),
@@ -1001,9 +1001,9 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: '2014-01-01' (type: string), null (type: string), null (type: string), null (type: string), null (type: string), null (type: string)
+ expressions: null (type: string), null (type: string), null (type: string), null (type: string), null (type: string), null (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
- Statistics: Num rows: 1 Data size: 514 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 504 Basic stats: COMPLETE Column stats: COMPLETE
ListSink
PREHOOK: query: SELECT
@@ -1026,4 +1026,4 @@ POSTHOOK: query: SELECT
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
#### A masked pattern was here ####
-2014-01-01 NULL NULL NULL NULL NULL
+NULL NULL NULL NULL NULL NULL
diff --git a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
index fe7fbfdd56..97a03b1ddb 100644
--- a/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
+++ b/ql/src/test/results/clientpositive/vector_aggregate_9.q.out
@@ -418,4 +418,4 @@ POSTHOOK: query: select min(ts), max(ts), sum(ts), avg(ts) from vectortab2korc_n
POSTHOOK: type: QUERY
POSTHOOK: Input: default@vectortab2korc_n4
#### A masked pattern was here ####
-2013-02-18 21:06:48 2081-02-22 01:21:53 4.591384881081E12 2.4254542425150557E9
+2013-02-18 21:06:48 2081-02-22 01:21:53 4.591334884281E12 2.4254278311045957E9
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index b66c0b000f..83a44ec61e 100644
--- a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -296,7 +296,7 @@ order by k
POSTHOOK: type: QUERY
POSTHOOK: Input: default@hundredorc
#### A masked pattern was here ####
--27832781952
+-8303557760
PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION
SELECT count(*), bin
FROM hundredorc
diff --git a/ql/src/test/results/clientpositive/vector_case_when_2.q.out b/ql/src/test/results/clientpositive/vector_case_when_2.q.out
index 159c9831a5..3dd1ed9d4a 100644
--- a/ql/src/test/results/clientpositive/vector_case_when_2.q.out
+++ b/ql/src/test/results/clientpositive/vector_case_when_2.q.out
@@ -129,15 +129,15 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: timestamps
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+ expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
sort order: +++
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date)
Map Vectorization:
enabled: true
@@ -153,10 +153,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey2 (type: timestamp), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: date)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -243,49 +243,49 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL
1815-05-06 00:12:37.543584705 1815-05-04 22:09:33.543584705 1900s Old Old 1815 2018-03-08 23:04:59 12 NULL 1816-05-05
1883-04-17 04:14:34.647766229 1883-04-16 02:11:30.647766229 1900s Old Old 1883 2018-03-08 23:04:59 14 NULL 1884-04-16
1966-08-16 13:36:50.183618031 1966-08-15 11:33:46.183618031 Early 2010s Old Old 1966 1966-08-16 13:36:50.183618031 36 NULL 1967-08-16
-1973-04-17 06:30:38.596784156 1973-04-16 04:27:34.596784156 Early 2010s Old Old 1973 1973-04-17 06:30:38.596784156 30 NULL 1974-04-17
+1973-04-17 06:30:38.596784156 1973-04-16 04:27:34.596784156 Early 2010s Old Old 1973 1973-04-17 06:30:38.596784156 30 NULL 1973-04-18
1974-10-04 17:21:03.989 1974-10-03 15:17:59.989 Early 2010s Old Old 1974 1974-10-04 17:21:03.989 21 NULL 1974-10-05
1976-03-03 04:54:33.000895162 1976-03-02 02:51:29.000895162 Early 2010s Old Old 1976 1976-03-03 04:54:33.000895162 54 NULL 1976-03-04
-1976-05-06 00:42:30.910786948 1976-05-04 22:39:26.910786948 Early 2010s Old Old 1976 1976-05-06 00:42:30.910786948 42 NULL 1977-05-06
-1978-08-05 14:41:05.501 1978-08-04 12:38:01.501 Early 2010s Old Old 1978 1978-08-05 14:41:05.501 41 NULL 1978-08-06
-1981-04-25 09:01:12.077192689 1981-04-24 06:58:08.077192689 Early 2010s Old Old 1981 1981-04-25 09:01:12.077192689 1 NULL 1982-04-25
+1976-05-06 00:42:30.910786948 1976-05-04 22:39:26.910786948 Early 2010s Old Old 1976 1976-05-06 00:42:30.910786948 42 NULL 1976-05-07
+1978-08-05 14:41:05.501 1978-08-04 12:38:01.501 Early 2010s Old Old 1978 1978-08-05 14:41:05.501 41 NULL 1979-08-05
+1981-04-25 09:01:12.077192689 1981-04-24 06:58:08.077192689 Early 2010s Old Old 1981 1981-04-25 09:01:12.077192689 1 NULL 1981-04-26
1981-11-15 23:03:10.999338387 1981-11-14 21:00:06.999338387 Early 2010s Old Old 1981 1981-11-15 23:03:10.999338387 3 NULL 1981-11-16
-1985-07-20 09:30:11 1985-07-19 07:27:07 Early 2010s Old Old 1985 1985-07-20 09:30:11 30 NULL 1986-07-20
+1985-07-20 09:30:11 1985-07-19 07:27:07 Early 2010s Old Old 1985 1985-07-20 09:30:11 30 NULL 1985-07-21
1985-11-18 16:37:54 1985-11-17 14:34:50 Early 2010s Old Old 1985 1985-11-18 16:37:54 37 NULL 1985-11-19
1987-02-21 19:48:29 1987-02-20 17:45:25 Early 2010s Old Old 1987 1987-02-21 19:48:29 48 NULL 1987-02-22
-1987-05-28 13:52:07.900916635 1987-05-27 11:49:03.900916635 Early 2010s Old Old 1987 1987-05-28 13:52:07.900916635 52 NULL 1988-05-27
-1998-10-16 20:05:29.397591987 1998-10-15 18:02:25.397591987 Early 2010s Old Old 1998 1998-10-16 20:05:29.397591987 5 NULL 1999-10-16
+1987-05-28 13:52:07.900916635 1987-05-27 11:49:03.900916635 Early 2010s Old Old 1987 1987-05-28 13:52:07.900916635 52 NULL 1987-05-29
+1998-10-16 20:05:29.397591987 1998-10-15 18:02:25.397591987 Early 2010s Old Old 1998 1998-10-16 20:05:29.397591987 5 NULL 1998-10-17
1999-10-03 16:59:10.396903939 1999-10-02 14:56:06.396903939 Early 2010s Old Old 1999 1999-10-03 16:59:10.396903939 59 NULL 1999-10-04
2000-12-18 08:42:30.000595596 2000-12-17 06:39:26.000595596 Early 2010s Old Old 2000 2018-03-08 23:04:59 42 NULL 2000-12-19
-2002-05-10 05:29:48.990818073 2002-05-09 03:26:44.990818073 Early 2010s Early 2000s Early 2000s 2002 2018-03-08 23:04:59 29 NULL 2002-05-11
-2003-09-23 22:33:17.00003252 2003-09-22 20:30:13.00003252 Early 2010s Early 2000s Early 2000s 2003 2018-03-08 23:04:59 33 NULL 2004-09-22
+2002-05-10 05:29:48.990818073 2002-05-09 03:26:44.990818073 Early 2010s Early 2000s Early 2000s 2002 2018-03-08 23:04:59 29 NULL 2003-05-10
+2003-09-23 22:33:17.00003252 2003-09-22 20:30:13.00003252 Early 2010s Early 2000s Early 2000s 2003 2018-03-08 23:04:59 33 NULL 2003-09-24
2004-03-07 20:14:13 2004-03-06 18:11:09 Early 2010s Early 2000s Early 2000s 2004 2018-03-08 23:04:59 14 NULL 2004-03-08
-2007-02-09 05:17:29.368756876 2007-02-08 03:14:25.368756876 Late 2000s Late 2000s Late 2000s 2007 2018-03-08 23:04:59 17 NULL 2008-02-09
+2007-02-09 05:17:29.368756876 2007-02-08 03:14:25.368756876 Late 2000s Late 2000s Late 2000s 2007 2018-03-08 23:04:59 17 NULL 2007-02-10
2009-01-21 10:49:07.108 2009-01-20 08:46:03.108 Late 2000s Late 2000s Late 2000s 2009 2018-03-08 23:04:59 49 NULL 2009-01-22
2010-04-08 02:43:35.861742727 2010-04-07 00:40:31.861742727 Late 2000s Late 2000s Late 2000s 2010 2018-03-08 23:04:59 43 NULL 2010-04-09
2013-04-07 02:44:43.00086821 2013-04-06 00:41:39.00086821 Early 2010s Early 2010s NULL 2013 2018-03-08 23:04:59 44 NULL 2013-04-08
2013-04-10 00:43:46.854731546 2013-04-08 22:40:42.854731546 Early 2010s Early 2010s NULL 2013 2018-03-08 23:04:59 43 NULL 2013-04-11
-2021-09-24 03:18:32.413655165 2021-09-23 01:15:28.413655165 Unknown NULL NULL 2021 2018-03-08 23:04:59 NULL NULL 2021-09-25
+2021-09-24 03:18:32.413655165 2021-09-23 01:15:28.413655165 Unknown NULL NULL 2021 2018-03-08 23:04:59 NULL NULL 2022-09-24
2024-11-11 16:42:41.101 2024-11-10 14:39:37.101 Unknown NULL NULL 2024 2018-03-08 23:04:59 42 NULL 2024-11-12
4143-07-08 10:53:27.252802259 4143-07-07 08:50:23.252802259 Unknown NULL NULL 4143 2018-03-08 23:04:59 53 NULL 4143-07-09
4966-12-04 09:30:55.202 4966-12-03 07:27:51.202 Unknown NULL NULL 4966 2018-03-08 23:04:59 30 NULL 4966-12-05
-5339-02-01 14:10:01.085678691 5339-01-31 12:06:57.085678691 Unknown NULL NULL 5339 2018-03-08 23:04:59 10 NULL 5340-02-01
+5339-02-01 14:10:01.085678691 5339-01-31 12:06:57.085678691 Unknown NULL NULL 5339 2018-03-08 23:04:59 10 NULL 5339-02-02
5344-10-04 18:40:08.165 5344-10-03 16:37:04.165 Unknown NULL NULL 5344 2018-03-08 23:04:59 40 NULL 5344-10-05
5397-07-13 07:12:32.000896438 5397-07-12 05:09:28.000896438 Unknown NULL NULL 5397 2018-03-08 23:04:59 12 12 5397-07-14
-5966-07-09 03:30:50.597 5966-07-08 01:27:46.597 Unknown NULL NULL 5966 2018-03-08 23:04:59 30 30 5967-07-09
-6229-06-28 02:54:28.970117179 6229-06-27 00:51:24.970117179 Unknown NULL NULL 6229 2018-03-08 23:04:59 54 54 6230-06-28
-6482-04-27 12:07:38.073915413 6482-04-26 10:04:34.073915413 Unknown NULL NULL 6482 2018-03-08 23:04:59 7 7 6482-04-28
+5966-07-09 03:30:50.597 5966-07-08 01:27:46.597 Unknown NULL NULL 5966 2018-03-08 23:04:59 30 30 5966-07-10
+6229-06-28 02:54:28.970117179 6229-06-27 00:51:24.970117179 Unknown NULL NULL 6229 2018-03-08 23:04:59 54 54 6229-06-29
+6482-04-27 12:07:38.073915413 6482-04-26 10:04:34.073915413 Unknown NULL NULL 6482 2018-03-08 23:04:59 7 7 6483-04-27
6631-11-13 16:31:29.702202248 6631-11-12 14:28:25.702202248 Unknown NULL NULL 6631 2018-03-08 23:04:59 31 31 6631-11-14
6705-09-28 18:27:28.000845672 6705-09-27 16:24:24.000845672 Unknown NULL NULL 6705 2018-03-08 23:04:59 27 NULL 6705-09-29
6731-02-12 08:12:48.287783702 6731-02-11 06:09:44.287783702 Unknown NULL NULL 6731 2018-03-08 23:04:59 12 NULL 6731-02-13
-7160-12-02 06:00:24.81200852 7160-12-01 03:57:20.81200852 Unknown NULL NULL 7160 2018-03-08 23:04:59 0 NULL 7161-12-02
-7409-09-07 23:33:32.459349602 7409-09-06 21:30:28.459349602 Unknown NULL NULL 7409 2018-03-08 23:04:59 33 NULL 7409-09-08
+7160-12-02 06:00:24.81200852 7160-12-01 03:57:20.81200852 Unknown NULL NULL 7160 2018-03-08 23:04:59 0 NULL 7160-12-03
+7409-09-07 23:33:32.459349602 7409-09-06 21:30:28.459349602 Unknown NULL NULL 7409 2018-03-08 23:04:59 33 NULL 7410-09-07
7503-06-23 23:14:17.486 7503-06-22 21:11:13.486 Unknown NULL NULL 7503 2018-03-08 23:04:59 14 NULL 7503-06-24
8422-07-22 03:21:45.745036084 8422-07-21 01:18:41.745036084 Unknown NULL NULL 8422 2018-03-08 23:04:59 21 NULL 8422-07-23
8521-01-16 20:42:05.668832388 8521-01-15 18:39:01.668832388 Unknown NULL NULL 8521 2018-03-08 23:04:59 42 NULL 8521-01-17
9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14
9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12
-9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9403-01-10
+9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09
PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
SELECT
ctimestamp1,
@@ -365,19 +365,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: timestamps
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct]
Select Operator
- expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+ expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
Select Vectorization:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [1, 3, 10, 12, 13, 14, 11, 7, 16, 23, 2]
- selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 
20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ selectExpressions: IfExprStringScalarStringGroupColumn(col 5:boolean, val 1800s or Earliercol 9:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-30 16:00:00.0) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val 1900scol 10:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1899-12-31 16:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 9:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprStringScalarStringScalar(col 8:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 15:59:59.999999999) -> 8:boolean) -> 9:string) -> 10:string) -> 9:string) -> 10:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 15:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 12:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2005-12-31 16:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprColumnNull(col 8:boolean, col 9:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 15:59:59.999999999) -> 8:boolean, ConstantVectorExpression(val Early 2010s) -> 9:string) -> 11:string) -> 12:string) -> 11:string) -> 12:string, IfExprStringScalarStringGroupColumn(col 5:boolean, val Oldcol 11:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 15:59:59.999999999) -> 5:boolean, IfExprStringScalarStringGroupColumn(col 6:boolean, val Early 2000scol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2005-12-31 16:00:00.0) -> 6:boolean, IfExprStringScalarStringGroupColumn(col 7:boolean, val Late 2000scol 11:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 7:boolean, IfExprNullNull(null, null) -> 11:string) -> 13:string) -> 11:string) -> 13:string, IfExprLongColumnLongColumn(col 5:boolean, col 6:int, col 7:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 10:21:03.989) -> 5:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 6:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 7:int) -> 14:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 5:boolean) -> 11:string, IfExprNullColumn(col 5:boolean, null, col 6)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-23 20:18:32.413655165) -> 5:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 6:int) -> 7:int, IfExprColumnNull(col 17:boolean, col 15:int, null)(children: ColAndCol(col 15:boolean, col 16:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 11:40:08.165) -> 15:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 08:31:29.702202248) -> 16:boolean) -> 17:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 15:int) -> 16:int, IfExprLongColumnLongColumn(col 
20:boolean, col 21:date, col 22:date)(children: DoubleColGreaterDoubleScalar(col 19:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 18:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 18:double) -> 19:double) -> 20:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 21:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 22:date) -> 23:date
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
sort order: +++
@@ -386,7 +386,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date)
Execution mode: vectorized
Map Vectorization:
@@ -412,10 +412,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey2 (type: timestamp), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: date)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -502,49 +502,49 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL
1815-05-06 00:12:37.543584705 1815-05-04 22:09:33.543584705 1900s Old Old 1815 2018-03-08 23:04:59 12 NULL 1816-05-05
1883-04-17 04:14:34.647766229 1883-04-16 02:11:30.647766229 1900s Old Old 1883 2018-03-08 23:04:59 14 NULL 1884-04-16
1966-08-16 13:36:50.183618031 1966-08-15 11:33:46.183618031 Early 2010s Old Old 1966 1966-08-16 13:36:50.183618031 36 NULL 1967-08-16
-1973-04-17 06:30:38.596784156 1973-04-16 04:27:34.596784156 Early 2010s Old Old 1973 1973-04-17 06:30:38.596784156 30 NULL 1974-04-17
+1973-04-17 06:30:38.596784156 1973-04-16 04:27:34.596784156 Early 2010s Old Old 1973 1973-04-17 06:30:38.596784156 30 NULL 1973-04-18
1974-10-04 17:21:03.989 1974-10-03 15:17:59.989 Early 2010s Old Old 1974 1974-10-04 17:21:03.989 21 NULL 1974-10-05
1976-03-03 04:54:33.000895162 1976-03-02 02:51:29.000895162 Early 2010s Old Old 1976 1976-03-03 04:54:33.000895162 54 NULL 1976-03-04
-1976-05-06 00:42:30.910786948 1976-05-04 22:39:26.910786948 Early 2010s Old Old 1976 1976-05-06 00:42:30.910786948 42 NULL 1977-05-06
-1978-08-05 14:41:05.501 1978-08-04 12:38:01.501 Early 2010s Old Old 1978 1978-08-05 14:41:05.501 41 NULL 1978-08-06
-1981-04-25 09:01:12.077192689 1981-04-24 06:58:08.077192689 Early 2010s Old Old 1981 1981-04-25 09:01:12.077192689 1 NULL 1982-04-25
+1976-05-06 00:42:30.910786948 1976-05-04 22:39:26.910786948 Early 2010s Old Old 1976 1976-05-06 00:42:30.910786948 42 NULL 1976-05-07
+1978-08-05 14:41:05.501 1978-08-04 12:38:01.501 Early 2010s Old Old 1978 1978-08-05 14:41:05.501 41 NULL 1979-08-05
+1981-04-25 09:01:12.077192689 1981-04-24 06:58:08.077192689 Early 2010s Old Old 1981 1981-04-25 09:01:12.077192689 1 NULL 1981-04-26
1981-11-15 23:03:10.999338387 1981-11-14 21:00:06.999338387 Early 2010s Old Old 1981 1981-11-15 23:03:10.999338387 3 NULL 1981-11-16
-1985-07-20 09:30:11 1985-07-19 07:27:07 Early 2010s Old Old 1985 1985-07-20 09:30:11 30 NULL 1986-07-20
+1985-07-20 09:30:11 1985-07-19 07:27:07 Early 2010s Old Old 1985 1985-07-20 09:30:11 30 NULL 1985-07-21
1985-11-18 16:37:54 1985-11-17 14:34:50 Early 2010s Old Old 1985 1985-11-18 16:37:54 37 NULL 1985-11-19
1987-02-21 19:48:29 1987-02-20 17:45:25 Early 2010s Old Old 1987 1987-02-21 19:48:29 48 NULL 1987-02-22
-1987-05-28 13:52:07.900916635 1987-05-27 11:49:03.900916635 Early 2010s Old Old 1987 1987-05-28 13:52:07.900916635 52 NULL 1988-05-27
-1998-10-16 20:05:29.397591987 1998-10-15 18:02:25.397591987 Early 2010s Old Old 1998 1998-10-16 20:05:29.397591987 5 NULL 1999-10-16
+1987-05-28 13:52:07.900916635 1987-05-27 11:49:03.900916635 Early 2010s Old Old 1987 1987-05-28 13:52:07.900916635 52 NULL 1987-05-29
+1998-10-16 20:05:29.397591987 1998-10-15 18:02:25.397591987 Early 2010s Old Old 1998 1998-10-16 20:05:29.397591987 5 NULL 1998-10-17
1999-10-03 16:59:10.396903939 1999-10-02 14:56:06.396903939 Early 2010s Old Old 1999 1999-10-03 16:59:10.396903939 59 NULL 1999-10-04
2000-12-18 08:42:30.000595596 2000-12-17 06:39:26.000595596 Early 2010s Old Old 2000 2018-03-08 23:04:59 42 NULL 2000-12-19
-2002-05-10 05:29:48.990818073 2002-05-09 03:26:44.990818073 Early 2010s Early 2000s Early 2000s 2002 2018-03-08 23:04:59 29 NULL 2002-05-11
-2003-09-23 22:33:17.00003252 2003-09-22 20:30:13.00003252 Early 2010s Early 2000s Early 2000s 2003 2018-03-08 23:04:59 33 NULL 2004-09-22
+2002-05-10 05:29:48.990818073 2002-05-09 03:26:44.990818073 Early 2010s Early 2000s Early 2000s 2002 2018-03-08 23:04:59 29 NULL 2003-05-10
+2003-09-23 22:33:17.00003252 2003-09-22 20:30:13.00003252 Early 2010s Early 2000s Early 2000s 2003 2018-03-08 23:04:59 33 NULL 2003-09-24
2004-03-07 20:14:13 2004-03-06 18:11:09 Early 2010s Early 2000s Early 2000s 2004 2018-03-08 23:04:59 14 NULL 2004-03-08
-2007-02-09 05:17:29.368756876 2007-02-08 03:14:25.368756876 Late 2000s Late 2000s Late 2000s 2007 2018-03-08 23:04:59 17 NULL 2008-02-09
+2007-02-09 05:17:29.368756876 2007-02-08 03:14:25.368756876 Late 2000s Late 2000s Late 2000s 2007 2018-03-08 23:04:59 17 NULL 2007-02-10
2009-01-21 10:49:07.108 2009-01-20 08:46:03.108 Late 2000s Late 2000s Late 2000s 2009 2018-03-08 23:04:59 49 NULL 2009-01-22
2010-04-08 02:43:35.861742727 2010-04-07 00:40:31.861742727 Late 2000s Late 2000s Late 2000s 2010 2018-03-08 23:04:59 43 NULL 2010-04-09
2013-04-07 02:44:43.00086821 2013-04-06 00:41:39.00086821 Early 2010s Early 2010s NULL 2013 2018-03-08 23:04:59 44 NULL 2013-04-08
2013-04-10 00:43:46.854731546 2013-04-08 22:40:42.854731546 Early 2010s Early 2010s NULL 2013 2018-03-08 23:04:59 43 NULL 2013-04-11
-2021-09-24 03:18:32.413655165 2021-09-23 01:15:28.413655165 Unknown NULL NULL 2021 2018-03-08 23:04:59 NULL NULL 2021-09-25
+2021-09-24 03:18:32.413655165 2021-09-23 01:15:28.413655165 Unknown NULL NULL 2021 2018-03-08 23:04:59 NULL NULL 2022-09-24
2024-11-11 16:42:41.101 2024-11-10 14:39:37.101 Unknown NULL NULL 2024 2018-03-08 23:04:59 42 NULL 2024-11-12
4143-07-08 10:53:27.252802259 4143-07-07 08:50:23.252802259 Unknown NULL NULL 4143 2018-03-08 23:04:59 53 NULL 4143-07-09
4966-12-04 09:30:55.202 4966-12-03 07:27:51.202 Unknown NULL NULL 4966 2018-03-08 23:04:59 30 NULL 4966-12-05
-5339-02-01 14:10:01.085678691 5339-01-31 12:06:57.085678691 Unknown NULL NULL 5339 2018-03-08 23:04:59 10 NULL 5340-02-01
+5339-02-01 14:10:01.085678691 5339-01-31 12:06:57.085678691 Unknown NULL NULL 5339 2018-03-08 23:04:59 10 NULL 5339-02-02
5344-10-04 18:40:08.165 5344-10-03 16:37:04.165 Unknown NULL NULL 5344 2018-03-08 23:04:59 40 NULL 5344-10-05
5397-07-13 07:12:32.000896438 5397-07-12 05:09:28.000896438 Unknown NULL NULL 5397 2018-03-08 23:04:59 12 12 5397-07-14
-5966-07-09 03:30:50.597 5966-07-08 01:27:46.597 Unknown NULL NULL 5966 2018-03-08 23:04:59 30 30 5967-07-09
-6229-06-28 02:54:28.970117179 6229-06-27 00:51:24.970117179 Unknown NULL NULL 6229 2018-03-08 23:04:59 54 54 6230-06-28
-6482-04-27 12:07:38.073915413 6482-04-26 10:04:34.073915413 Unknown NULL NULL 6482 2018-03-08 23:04:59 7 7 6482-04-28
+5966-07-09 03:30:50.597 5966-07-08 01:27:46.597 Unknown NULL NULL 5966 2018-03-08 23:04:59 30 30 5966-07-10
+6229-06-28 02:54:28.970117179 6229-06-27 00:51:24.970117179 Unknown NULL NULL 6229 2018-03-08 23:04:59 54 54 6229-06-29
+6482-04-27 12:07:38.073915413 6482-04-26 10:04:34.073915413 Unknown NULL NULL 6482 2018-03-08 23:04:59 7 7 6483-04-27
6631-11-13 16:31:29.702202248 6631-11-12 14:28:25.702202248 Unknown NULL NULL 6631 2018-03-08 23:04:59 31 31 6631-11-14
6705-09-28 18:27:28.000845672 6705-09-27 16:24:24.000845672 Unknown NULL NULL 6705 2018-03-08 23:04:59 27 NULL 6705-09-29
6731-02-12 08:12:48.287783702 6731-02-11 06:09:44.287783702 Unknown NULL NULL 6731 2018-03-08 23:04:59 12 NULL 6731-02-13
-7160-12-02 06:00:24.81200852 7160-12-01 03:57:20.81200852 Unknown NULL NULL 7160 2018-03-08 23:04:59 0 NULL 7161-12-02
-7409-09-07 23:33:32.459349602 7409-09-06 21:30:28.459349602 Unknown NULL NULL 7409 2018-03-08 23:04:59 33 NULL 7409-09-08
+7160-12-02 06:00:24.81200852 7160-12-01 03:57:20.81200852 Unknown NULL NULL 7160 2018-03-08 23:04:59 0 NULL 7160-12-03
+7409-09-07 23:33:32.459349602 7409-09-06 21:30:28.459349602 Unknown NULL NULL 7409 2018-03-08 23:04:59 33 NULL 7410-09-07
7503-06-23 23:14:17.486 7503-06-22 21:11:13.486 Unknown NULL NULL 7503 2018-03-08 23:04:59 14 NULL 7503-06-24
8422-07-22 03:21:45.745036084 8422-07-21 01:18:41.745036084 Unknown NULL NULL 8422 2018-03-08 23:04:59 21 NULL 8422-07-23
8521-01-16 20:42:05.668832388 8521-01-15 18:39:01.668832388 Unknown NULL NULL 8521 2018-03-08 23:04:59 42 NULL 8521-01-17
9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14
9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12
-9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9403-01-10
+9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09
PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
SELECT
ctimestamp1,
@@ -624,19 +624,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: timestamps
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
vectorizationSchemaColumns: [0:cdate:date, 1:ctimestamp1:timestamp, 2:stimestamp1:string, 3:ctimestamp2:timestamp, 4:ROW__ID:struct]
Select Operator
- expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00.0')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00.0')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00.0')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
+ expressions: ctimestamp1 (type: timestamp), ctimestamp2 (type: timestamp), CASE WHEN ((ctimestamp2 <= TIMESTAMP'1800-12-31 00:00:00')) THEN ('1800s or Earlier') WHEN ((ctimestamp2 < TIMESTAMP'1900-01-01 00:00:00')) THEN ('1900s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE ('Unknown') END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN ('Early 2010s') ELSE (null) END (type: string), CASE WHEN ((ctimestamp2 <= TIMESTAMP'2000-12-31 23:59:59.999999999')) THEN ('Old') WHEN ((ctimestamp2 < TIMESTAMP'2006-01-01 00:00:00')) THEN ('Early 2000s') WHEN (ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') THEN ('Late 2000s') WHEN ((ctimestamp2 <= TIMESTAMP'2015-12-31 23:59:59.999999999')) THEN (null) ELSE (null) END (type: string), if((ctimestamp1 < TIMESTAMP'1974-10-04 17:21:03.989'), year(ctimestamp1), year(ctimestamp2)) (type: int), CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END (type: string), if((ctimestamp1 = TIMESTAMP'2021-09-24 03:18:32.413655165'), null, minute(ctimestamp1)) (type: int), if(((ctimestamp2 >= TIMESTAMP'5344-10-04 18:40:08.165') and (ctimestamp2 < TIMESTAMP'6631-11-13 16:31:29.702202248')), minute(ctimestamp1), null) (type: int), if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (type: date), stimestamp1 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
Select Vectorization:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [1, 3, 15, 26, 36, 40, 42, 44, 46, 53, 2]
- selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-31 00:00:00.0) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1900-01-01 00:00:00.0) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCondExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 23:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 23:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2006-01-01 00:00:00.0) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00.0' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 17:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59.0') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-24 03:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, 
null)(children: ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 18:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 16:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(children: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ selectExpressions: IfExprColumnCondExpr(col 5:boolean, col 6:stringcol 14:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 1800-12-30 16:00:00.0) -> 5:boolean, ConstantVectorExpression(val 1800s or Earlier) -> 6:string, IfExprColumnCondExpr(col 7:boolean, col 8:stringcol 13:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 1899-12-31 16:00:00.0) -> 7:boolean, ConstantVectorExpression(val 1900s) -> 8:string, IfExprColumnCondExpr(col 9:boolean, col 10:stringcol 12:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 9:boolean, ConstantVectorExpression(val Late 2000s) -> 10:string, IfExprStringScalarStringScalar(col 11:boolean, val Early 2010s, val Unknown)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 15:59:59.999999999) -> 11:boolean) -> 12:string) -> 13:string) -> 14:string) -> 15:string, IfExprColumnCondExpr(col 11:boolean, col 16:stringcol 25:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 15:59:59.999999999) -> 11:boolean, ConstantVectorExpression(val Old) -> 16:string, IfExprColumnCondExpr(col 17:boolean, col 18:stringcol 24:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2005-12-31 16:00:00.0) -> 17:boolean, ConstantVectorExpression(val Early 2000s) -> 18:string, IfExprColumnCondExpr(col 19:boolean, col 20:stringcol 23:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 19:boolean, ConstantVectorExpression(val Late 2000s) -> 20:string, IfExprColumnNull(col 21:boolean, col 22:string, null)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2015-12-31 15:59:59.999999999) -> 21:boolean, ConstantVectorExpression(val Early 2010s) -> 22:string) -> 23:string) -> 24:string) -> 25:string) -> 26:string, IfExprColumnCondExpr(col 27:boolean, col 28:stringcol 35:string)(children: TimestampColLessEqualTimestampScalar(col 3:timestamp, val 2000-12-31 15:59:59.999999999) -> 27:boolean, ConstantVectorExpression(val Old) -> 28:string, IfExprColumnCondExpr(col 29:boolean, col 30:stringcol 34:string)(children: TimestampColLessTimestampScalar(col 3:timestamp, val 2005-12-31 16:00:00.0) -> 29:boolean, ConstantVectorExpression(val Early 2000s) -> 30:string, IfExprColumnCondExpr(col 31:boolean, col 32:stringcol 33:string)(children: VectorUDFAdaptor(ctimestamp2 BETWEEN TIMESTAMP'2006-01-01 00:00:00' AND TIMESTAMP'2010-12-31 23:59:59.999999999') -> 31:boolean, ConstantVectorExpression(val Late 2000s) -> 32:string, IfExprNullNull(null, null) -> 33:string) -> 34:string) -> 35:string) -> 36:string, IfExprCondExprCondExpr(col 37:boolean, col 38:intcol 39:int)(children: TimestampColLessTimestampScalar(col 1:timestamp, val 1974-10-04 10:21:03.989) -> 37:boolean, VectorUDFYearTimestamp(col 1:timestamp, field YEAR) -> 38:int, VectorUDFYearTimestamp(col 3:timestamp, field YEAR) -> 39:int) -> 40:int, VectorUDFAdaptor(CASE WHEN ((stimestamp1 like '%19%')) THEN (stimestamp1) ELSE (TIMESTAMP'2018-03-08 23:04:59') END)(children: SelectStringColLikeStringScalar(col 2:string) -> 41:boolean) -> 42:string, IfExprNullCondExpr(col 41:boolean, null, col 43:int)(children: TimestampColEqualTimestampScalar(col 1:timestamp, val 2021-09-23 20:18:32.413655165) -> 41:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 43:int) -> 44:int, IfExprCondExprNull(col 47:boolean, col 45:int, null)(children: 
ColAndCol(col 45:boolean, col 46:boolean)(children: TimestampColGreaterEqualTimestampScalar(col 3:timestamp, val 5344-10-04 11:40:08.165) -> 45:boolean, TimestampColLessTimestampScalar(col 3:timestamp, val 6631-11-13 08:31:29.702202248) -> 46:boolean) -> 47:boolean, VectorUDFMinuteTimestamp(col 1:timestamp, field MINUTE) -> 45:int) -> 46:int, IfExprCondExprCondExpr(col 50:boolean, col 51:datecol 52:date)(children: DoubleColGreaterDoubleScalar(col 49:double, val 100.0)(children: DoubleColModuloDoubleScalar(col 48:double, val 500.0)(children: CastTimestampToDouble(col 1:timestamp) -> 48:double) -> 49:double) -> 50:boolean, VectorUDFDateAddColScalar(col 0:date, val 1) -> 51:date, VectorUDFDateAddColScalar(col 0:date, val 365) -> 52:date) -> 53:date
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: timestamp), _col10 (type: string), _col1 (type: timestamp)
sort order: +++
@@ -645,7 +645,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: int), _col8 (type: int), _col9 (type: date)
Execution mode: vectorized
Map Vectorization:
@@ -671,10 +671,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: timestamp), KEY.reducesinkkey2 (type: timestamp), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: int), VALUE._col4 (type: string), VALUE._col5 (type: int), VALUE._col6 (type: int), VALUE._col7 (type: date)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 51 Data size: 12384 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 51 Data size: 12300 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -761,46 +761,46 @@ NULL NULL Unknown NULL NULL NULL 2018-03-08 23:04:59 NULL NULL NULL
1815-05-06 00:12:37.543584705 1815-05-04 22:09:33.543584705 1900s Old Old 1815 2018-03-08 23:04:59 12 NULL 1816-05-05
1883-04-17 04:14:34.647766229 1883-04-16 02:11:30.647766229 1900s Old Old 1883 2018-03-08 23:04:59 14 NULL 1884-04-16
1966-08-16 13:36:50.183618031 1966-08-15 11:33:46.183618031 Early 2010s Old Old 1966 1966-08-16 13:36:50.183618031 36 NULL 1967-08-16
-1973-04-17 06:30:38.596784156 1973-04-16 04:27:34.596784156 Early 2010s Old Old 1973 1973-04-17 06:30:38.596784156 30 NULL 1974-04-17
+1973-04-17 06:30:38.596784156 1973-04-16 04:27:34.596784156 Early 2010s Old Old 1973 1973-04-17 06:30:38.596784156 30 NULL 1973-04-18
1974-10-04 17:21:03.989 1974-10-03 15:17:59.989 Early 2010s Old Old 1974 1974-10-04 17:21:03.989 21 NULL 1974-10-05
1976-03-03 04:54:33.000895162 1976-03-02 02:51:29.000895162 Early 2010s Old Old 1976 1976-03-03 04:54:33.000895162 54 NULL 1976-03-04
-1976-05-06 00:42:30.910786948 1976-05-04 22:39:26.910786948 Early 2010s Old Old 1976 1976-05-06 00:42:30.910786948 42 NULL 1977-05-06
-1978-08-05 14:41:05.501 1978-08-04 12:38:01.501 Early 2010s Old Old 1978 1978-08-05 14:41:05.501 41 NULL 1978-08-06
-1981-04-25 09:01:12.077192689 1981-04-24 06:58:08.077192689 Early 2010s Old Old 1981 1981-04-25 09:01:12.077192689 1 NULL 1982-04-25
+1976-05-06 00:42:30.910786948 1976-05-04 22:39:26.910786948 Early 2010s Old Old 1976 1976-05-06 00:42:30.910786948 42 NULL 1976-05-07
+1978-08-05 14:41:05.501 1978-08-04 12:38:01.501 Early 2010s Old Old 1978 1978-08-05 14:41:05.501 41 NULL 1979-08-05
+1981-04-25 09:01:12.077192689 1981-04-24 06:58:08.077192689 Early 2010s Old Old 1981 1981-04-25 09:01:12.077192689 1 NULL 1981-04-26
1981-11-15 23:03:10.999338387 1981-11-14 21:00:06.999338387 Early 2010s Old Old 1981 1981-11-15 23:03:10.999338387 3 NULL 1981-11-16
-1985-07-20 09:30:11 1985-07-19 07:27:07 Early 2010s Old Old 1985 1985-07-20 09:30:11 30 NULL 1986-07-20
+1985-07-20 09:30:11 1985-07-19 07:27:07 Early 2010s Old Old 1985 1985-07-20 09:30:11 30 NULL 1985-07-21
1985-11-18 16:37:54 1985-11-17 14:34:50 Early 2010s Old Old 1985 1985-11-18 16:37:54 37 NULL 1985-11-19
1987-02-21 19:48:29 1987-02-20 17:45:25 Early 2010s Old Old 1987 1987-02-21 19:48:29 48 NULL 1987-02-22
-1987-05-28 13:52:07.900916635 1987-05-27 11:49:03.900916635 Early 2010s Old Old 1987 1987-05-28 13:52:07.900916635 52 NULL 1988-05-27
-1998-10-16 20:05:29.397591987 1998-10-15 18:02:25.397591987 Early 2010s Old Old 1998 1998-10-16 20:05:29.397591987 5 NULL 1999-10-16
+1987-05-28 13:52:07.900916635 1987-05-27 11:49:03.900916635 Early 2010s Old Old 1987 1987-05-28 13:52:07.900916635 52 NULL 1987-05-29
+1998-10-16 20:05:29.397591987 1998-10-15 18:02:25.397591987 Early 2010s Old Old 1998 1998-10-16 20:05:29.397591987 5 NULL 1998-10-17
1999-10-03 16:59:10.396903939 1999-10-02 14:56:06.396903939 Early 2010s Old Old 1999 1999-10-03 16:59:10.396903939 59 NULL 1999-10-04
2000-12-18 08:42:30.000595596 2000-12-17 06:39:26.000595596 Early 2010s Old Old 2000 2018-03-08 23:04:59 42 NULL 2000-12-19
-2002-05-10 05:29:48.990818073 2002-05-09 03:26:44.990818073 Early 2010s Early 2000s Early 2000s 2002 2018-03-08 23:04:59 29 NULL 2002-05-11
-2003-09-23 22:33:17.00003252 2003-09-22 20:30:13.00003252 Early 2010s Early 2000s Early 2000s 2003 2018-03-08 23:04:59 33 NULL 2004-09-22
+2002-05-10 05:29:48.990818073 2002-05-09 03:26:44.990818073 Early 2010s Early 2000s Early 2000s 2002 2018-03-08 23:04:59 29 NULL 2003-05-10
+2003-09-23 22:33:17.00003252 2003-09-22 20:30:13.00003252 Early 2010s Early 2000s Early 2000s 2003 2018-03-08 23:04:59 33 NULL 2003-09-24
2004-03-07 20:14:13 2004-03-06 18:11:09 Early 2010s Early 2000s Early 2000s 2004 2018-03-08 23:04:59 14 NULL 2004-03-08
-2007-02-09 05:17:29.368756876 2007-02-08 03:14:25.368756876 Late 2000s Late 2000s Late 2000s 2007 2018-03-08 23:04:59 17 NULL 2008-02-09
+2007-02-09 05:17:29.368756876 2007-02-08 03:14:25.368756876 Late 2000s Late 2000s Late 2000s 2007 2018-03-08 23:04:59 17 NULL 2007-02-10
2009-01-21 10:49:07.108 2009-01-20 08:46:03.108 Late 2000s Late 2000s Late 2000s 2009 2018-03-08 23:04:59 49 NULL 2009-01-22
2010-04-08 02:43:35.861742727 2010-04-07 00:40:31.861742727 Late 2000s Late 2000s Late 2000s 2010 2018-03-08 23:04:59 43 NULL 2010-04-09
2013-04-07 02:44:43.00086821 2013-04-06 00:41:39.00086821 Early 2010s Early 2010s NULL 2013 2018-03-08 23:04:59 44 NULL 2013-04-08
2013-04-10 00:43:46.854731546 2013-04-08 22:40:42.854731546 Early 2010s Early 2010s NULL 2013 2018-03-08 23:04:59 43 NULL 2013-04-11
-2021-09-24 03:18:32.413655165 2021-09-23 01:15:28.413655165 Unknown NULL NULL 2021 2018-03-08 23:04:59 NULL NULL 2021-09-25
+2021-09-24 03:18:32.413655165 2021-09-23 01:15:28.413655165 Unknown NULL NULL 2021 2018-03-08 23:04:59 NULL NULL 2022-09-24
2024-11-11 16:42:41.101 2024-11-10 14:39:37.101 Unknown NULL NULL 2024 2018-03-08 23:04:59 42 NULL 2024-11-12
4143-07-08 10:53:27.252802259 4143-07-07 08:50:23.252802259 Unknown NULL NULL 4143 2018-03-08 23:04:59 53 NULL 4143-07-09
4966-12-04 09:30:55.202 4966-12-03 07:27:51.202 Unknown NULL NULL 4966 2018-03-08 23:04:59 30 NULL 4966-12-05
-5339-02-01 14:10:01.085678691 5339-01-31 12:06:57.085678691 Unknown NULL NULL 5339 2018-03-08 23:04:59 10 NULL 5340-02-01
+5339-02-01 14:10:01.085678691 5339-01-31 12:06:57.085678691 Unknown NULL NULL 5339 2018-03-08 23:04:59 10 NULL 5339-02-02
5344-10-04 18:40:08.165 5344-10-03 16:37:04.165 Unknown NULL NULL 5344 2018-03-08 23:04:59 40 NULL 5344-10-05
5397-07-13 07:12:32.000896438 5397-07-12 05:09:28.000896438 Unknown NULL NULL 5397 2018-03-08 23:04:59 12 12 5397-07-14
-5966-07-09 03:30:50.597 5966-07-08 01:27:46.597 Unknown NULL NULL 5966 2018-03-08 23:04:59 30 30 5967-07-09
-6229-06-28 02:54:28.970117179 6229-06-27 00:51:24.970117179 Unknown NULL NULL 6229 2018-03-08 23:04:59 54 54 6230-06-28
-6482-04-27 12:07:38.073915413 6482-04-26 10:04:34.073915413 Unknown NULL NULL 6482 2018-03-08 23:04:59 7 7 6482-04-28
+5966-07-09 03:30:50.597 5966-07-08 01:27:46.597 Unknown NULL NULL 5966 2018-03-08 23:04:59 30 30 5966-07-10
+6229-06-28 02:54:28.970117179 6229-06-27 00:51:24.970117179 Unknown NULL NULL 6229 2018-03-08 23:04:59 54 54 6229-06-29
+6482-04-27 12:07:38.073915413 6482-04-26 10:04:34.073915413 Unknown NULL NULL 6482 2018-03-08 23:04:59 7 7 6483-04-27
6631-11-13 16:31:29.702202248 6631-11-12 14:28:25.702202248 Unknown NULL NULL 6631 2018-03-08 23:04:59 31 31 6631-11-14
6705-09-28 18:27:28.000845672 6705-09-27 16:24:24.000845672 Unknown NULL NULL 6705 2018-03-08 23:04:59 27 NULL 6705-09-29
6731-02-12 08:12:48.287783702 6731-02-11 06:09:44.287783702 Unknown NULL NULL 6731 2018-03-08 23:04:59 12 NULL 6731-02-13
-7160-12-02 06:00:24.81200852 7160-12-01 03:57:20.81200852 Unknown NULL NULL 7160 2018-03-08 23:04:59 0 NULL 7161-12-02
-7409-09-07 23:33:32.459349602 7409-09-06 21:30:28.459349602 Unknown NULL NULL 7409 2018-03-08 23:04:59 33 NULL 7409-09-08
+7160-12-02 06:00:24.81200852 7160-12-01 03:57:20.81200852 Unknown NULL NULL 7160 2018-03-08 23:04:59 0 NULL 7160-12-03
+7409-09-07 23:33:32.459349602 7409-09-06 21:30:28.459349602 Unknown NULL NULL 7409 2018-03-08 23:04:59 33 NULL 7410-09-07
7503-06-23 23:14:17.486 7503-06-22 21:11:13.486 Unknown NULL NULL 7503 2018-03-08 23:04:59 14 NULL 7503-06-24
8422-07-22 03:21:45.745036084 8422-07-21 01:18:41.745036084 Unknown NULL NULL 8422 2018-03-08 23:04:59 21 NULL 8422-07-23
8521-01-16 20:42:05.668832388 8521-01-15 18:39:01.668832388 Unknown NULL NULL 8521 2018-03-08 23:04:59 42 NULL 8521-01-17
9075-06-13 16:20:09.218517797 9075-06-12 14:17:05.218517797 Unknown NULL NULL 9075 2018-03-08 23:04:59 20 NULL 9075-06-14
9209-11-11 04:08:58.223768453 9209-11-10 02:05:54.223768453 Unknown NULL NULL 9209 2018-03-08 23:04:59 8 NULL 9209-11-12
-9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9403-01-10
+9403-01-09 18:12:33.547 9403-01-08 16:09:29.547 Unknown NULL NULL 9403 2018-03-08 23:04:59 12 NULL 9404-01-09
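Note on the row changes above: the last column is produced by if(((UDFToDouble(ctimestamp1) % 500.0D) > 100.0D), date_add(cdate, 1), date_add(cdate, 365)) (see the Select Operator expressions earlier in this plan), so a row's expected date flips between +1 day and +365 days whenever the epoch-seconds value of ctimestamp1 shifts. The scalar rewrites in the vectorized plan (for example 1974-10-04 17:21:03.989 becoming 1974-10-04 10:21:03.989) suggest a 7 to 8 hour shift. A minimal sketch of how such a shift flips the modulo test, assuming the q-test session time zone was America/Los_Angeles (an assumption, not stated in this hunk); the class name is illustrative:

    // Illustrative sketch only: a zone-offset shift in epoch seconds can flip (ts % 500) > 100,
    // which is the test that chooses between date_add(cdate, 1) and date_add(cdate, 365).
    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class ModuloBranchSketch {
      public static void main(String[] args) {
        LocalDateTime wallClock = LocalDateTime.of(2021, 9, 24, 3, 18, 32);
        // Wall clock resolved in the assumed session zone (old interpretation).
        double inSessionZone = wallClock.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond();
        // Wall clock taken as-is, i.e. against UTC (zone-agnostic interpretation).
        double asUtc = wallClock.toEpochSecond(ZoneOffset.UTC);
        System.out.println(asUtc - inSessionZone);        // the zone offset in seconds (-25200 on this date)
        System.out.println((inSessionZone % 500) > 100);  // branch taken under one interpretation
        System.out.println((asUtc % 500) > 100);          // branch taken under the other
      }
    }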
diff --git a/ql/src/test/results/clientpositive/vector_data_types.q.out b/ql/src/test/results/clientpositive/vector_data_types.q.out
index c2a2fce075..3fc70401c7 100644
--- a/ql/src/test/results/clientpositive/vector_data_types.q.out
+++ b/ql/src/test/results/clientpositive/vector_data_types.q.out
@@ -127,23 +127,23 @@ STAGE PLANS:
Map Reduce
Map Operator Tree:
TableScan
                alias: over1korc_n1
-                Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: t (type: tinyint), si (type: smallint), i (type: int), b (type: bigint), f (type: float), d (type: double), bo (type: boolean), s (type: string), ts (type: timestamp), dec (type: decimal(4,2)), bin (type: binary)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int)
sort order: +++
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
TopN Hash Memory Usage: 0.1
value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary)
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: boolean), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: decimal(4,2)), VALUE._col7 (type: binary)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 20
Statistics: Num rows: 20 Data size: 5920 Basic stats: COMPLETE Column stats: NONE
@@ -199,8 +199,8 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
POSTHOOK: type: QUERY
POSTHOOK: Input: default@over1korc_n1
#### A masked pattern was here ####
--17045922556
+-25838728092
PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN VECTORIZATION EXPRESSION select t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY t, si, i LIMIT 20
POSTHOOK: type: QUERY
@@ -217,8 +217,8 @@ STAGE PLANS:
Map Reduce
Map Operator Tree:
TableScan
                alias: over1korc_n1
-                Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -228,7 +228,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int)
sort order: +++
@@ -237,7 +237,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
TopN Hash Memory Usage: 0.1
value expressions: _col3 (type: bigint), _col4 (type: float), _col5 (type: double), _col6 (type: boolean), _col7 (type: string), _col8 (type: timestamp), _col9 (type: decimal(4,2)), _col10 (type: binary)
Execution mode: vectorized
@@ -258,7 +258,7 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: smallint), KEY.reducesinkkey2 (type: int), VALUE._col0 (type: bigint), VALUE._col1 (type: float), VALUE._col2 (type: double), VALUE._col3 (type: boolean), VALUE._col4 (type: string), VALUE._col5 (type: timestamp), VALUE._col6 (type: decimal(4,2)), VALUE._col7 (type: binary)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 20
Statistics: Num rows: 20 Data size: 5920 Basic stats: COMPLETE Column stats: NONE
@@ -325,8 +325,8 @@ STAGE PLANS:
Map Reduce
Map Operator Tree:
TableScan
                alias: over1korc_n1
-                Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -337,7 +337,7 @@ STAGE PLANS:
native: true
projectedOutputColumnNums: [12]
selectExpressions: VectorUDFAdaptor(hash(t,si,i,b,f,d,bo,s,ts,dec,bin)) -> 12:int
- Statistics: Num rows: 1050 Data size: 311254 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1050 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: sum(_col0)
Group By Vectorization:
@@ -403,4 +403,4 @@ FROM (SELECT t, si, i, b, f, d, bo, s, ts, `dec`, bin FROM over1korc_n1 ORDER BY
POSTHOOK: type: QUERY
POSTHOOK: Input: default@over1korc_n1
#### A masked pattern was here ####
--17045922556
+-25838728092
diff --git a/ql/src/test/results/clientpositive/vector_decimal_1.q.out b/ql/src/test/results/clientpositive/vector_decimal_1.q.out
index 80def64290..df2070ec26 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_1.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_1.q.out
@@ -861,7 +861,7 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_1
#### A masked pattern was here ####
NULL
-1969-12-31 16:00:17.29
+1970-01-01 00:00:17.29
PREHOOK: query: drop table decimal_1
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@decimal_1
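The decimal_1 change above moves the rendered timestamp from 1969-12-31 16:00:17.29 to 1970-01-01 00:00:17.29, an 8-hour difference for the same underlying value of 17.29 seconds past the epoch, which is what you would see if the value is now rendered against UTC instead of the session time zone. A small sketch, assuming the old rendering used America/Los_Angeles (PST, UTC-8, on that date); the class name is illustrative:

    // Illustrative sketch only: the same instant (17.29 s after the epoch) rendered in the
    // assumed session zone and in UTC reproduces the old and new expected strings.
    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class DecimalToTimestampSketch {
      public static void main(String[] args) {
        Instant instant = Instant.ofEpochMilli(17_290L); // decimal value 17.29, taken as seconds
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SS");
        System.out.println(fmt.format(instant.atZone(ZoneId.of("America/Los_Angeles")))); // 1969-12-31 16:00:17.29
        System.out.println(fmt.format(instant.atZone(ZoneOffset.UTC)));                   // 1970-01-01 00:00:17.29
      }
    }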
diff --git a/ql/src/test/results/clientpositive/vector_decimal_cast.q.out b/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
index 085064823e..96e12a292b 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_cast.q.out
@@ -83,16 +83,16 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
--13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0000000000 528534767.00000000000000 1.00 -13
--15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0000000000 528534767.00000000000000 1.00 -4
--9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0000000000 528534767.00000000000000 1.00 -16
-15007.0 528534767 true 1969-12-31 15:59:50.434 15007.0000000000 528534767.00000000000000 1.00 -10
-7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0000000000 528534767.00000000000000 1.00 15
-4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0000000000 528534767.00000000000000 1.00 7
--7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0000000000 528534767.00000000000000 1.00 5
--15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0000000000 528534767.00000000000000 1.00 -8
--15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0000000000 528534767.00000000000000 1.00 -15
-5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0000000000 528534767.00000000000000 1.00 -16
+-13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0000000000 528534767.00000000000000 1.00 -28813
+-15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0000000000 528534767.00000000000000 1.00 -28804
+-9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0000000000 528534767.00000000000000 1.00 -28816
+15007.0 528534767 true 1969-12-31 15:59:50.434 15007.0000000000 528534767.00000000000000 1.00 -28810
+7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0000000000 528534767.00000000000000 1.00 -28785
+4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0000000000 528534767.00000000000000 1.00 -28793
+-7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0000000000 528534767.00000000000000 1.00 -28795
+-15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0000000000 528534767.00000000000000 1.00 -28808
+-15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0000000000 528534767.00000000000000 1.00 -28815
+5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0000000000 528534767.00000000000000 1.00 -28816
PREHOOK: query: CREATE TABLE alltypes_small STORED AS TEXTFILE AS SELECT * FROM alltypesorc
PREHOOK: type: CREATETABLE_AS_SELECT
PREHOOK: Input: default@alltypesorc
@@ -202,13 +202,13 @@ POSTHOOK: query: SELECT cdouble, cint, cboolean1, ctimestamp1, CAST(cdouble AS D
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypes_small
#### A masked pattern was here ####
--13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0000000000 528534767.00000000000000 1.00 -13
--15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0000000000 528534767.00000000000000 1.00 -4
--9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0000000000 528534767.00000000000000 1.00 -16
-15007.0 528534767 true 1969-12-31 15:59:50.434 15007.0000000000 528534767.00000000000000 1.00 -10
-7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0000000000 528534767.00000000000000 1.00 15
-4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0000000000 528534767.00000000000000 1.00 7
--7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0000000000 528534767.00000000000000 1.00 5
--15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0000000000 528534767.00000000000000 1.00 -8
--15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0000000000 528534767.00000000000000 1.00 -15
-5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0000000000 528534767.00000000000000 1.00 -16
+-13326.0 528534767 true 1969-12-31 15:59:46.674 -13326.0000000000 528534767.00000000000000 1.00 -28813
+-15813.0 528534767 true 1969-12-31 15:59:55.787 -15813.0000000000 528534767.00000000000000 1.00 -28804
+-9566.0 528534767 true 1969-12-31 15:59:44.187 -9566.0000000000 528534767.00000000000000 1.00 -28816
+15007.0 528534767 true 1969-12-31 15:59:50.434 15007.0000000000 528534767.00000000000000 1.00 -28810
+7021.0 528534767 true 1969-12-31 16:00:15.007 7021.0000000000 528534767.00000000000000 1.00 -28785
+4963.0 528534767 true 1969-12-31 16:00:07.021 4963.0000000000 528534767.00000000000000 1.00 -28793
+-7824.0 528534767 true 1969-12-31 16:00:04.963 -7824.0000000000 528534767.00000000000000 1.00 -28795
+-15431.0 528534767 true 1969-12-31 15:59:52.176 -15431.0000000000 528534767.00000000000000 1.00 -28808
+-15549.0 528534767 true 1969-12-31 15:59:44.569 -15549.0000000000 528534767.00000000000000 1.00 -28815
+5780.0 528534767 true 1969-12-31 15:59:44.451 5780.0000000000 528534767.00000000000000 1.00 -28816
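In the two vector_decimal_cast hunks above, every value in the last column moves by exactly -28800, for example -13 to -28813 and 15 to -28785. That is eight hours in seconds, consistent with the timestamp-to-integer conversion now using epoch seconds computed without applying the session zone. A minimal sketch of the delta, assuming the old behavior used America/Los_Angeles (PST, UTC-8, at the epoch); names are illustrative:

    // Illustrative sketch only: epoch seconds of one wall-clock reading, resolved in the
    // assumed session zone versus taken as UTC; the two differ by -28800 s (eight hours).
    import java.time.LocalDateTime;
    import java.time.ZoneId;
    import java.time.ZoneOffset;

    public class TimestampCastSketch {
      public static void main(String[] args) {
        LocalDateTime wallClock = LocalDateTime.of(1969, 12, 31, 15, 59, 46);
        long inSessionZone = wallClock.atZone(ZoneId.of("America/Los_Angeles")).toEpochSecond();
        long asUtc = wallClock.toEpochSecond(ZoneOffset.UTC);
        System.out.println(asUtc - inSessionZone); // -28800
      }
    }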
diff --git a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
index c296c306b3..9d1f5f7cc4 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out
@@ -136,16 +136,16 @@ LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_test_n1
#### A masked pattern was here ####
-1836.44199584197700 -1166.02723492725400 0.8372697814834 245972.55810810255804469 5.6189189189 835 1000 NULL 835 true 1000.823076923077 835.6189 1000.823076923077 1969-12-31 16:13:55.618918918
-1856.13222453224620 -1178.52931392929240 0.8372449787014 251275.44324324968747899 4.5783783784 844 1011 NULL 844 true 1011.5538461538462 844.57837 1011.5538461538462 1969-12-31 16:14:04.578378378
-1858.75758835761550 -1180.19625779623100 0.8372417113669 251986.76756757564861519 5.7729729730 845 1012 NULL 845 true 1012.9846153846155 845.77295 1012.9846153846155 1969-12-31 16:14:05.772972973
-1862.69563409566930 -1182.69667359663860 0.8372368276345 253055.63918919969667286 7.5648648649 847 1015 NULL 847 true 1015.1307692307693 847.5649 1015.1307692307693 1969-12-31 16:14:07.564864864
-1883.69854469852330 -1196.03222453224660 0.8372111259286 258794.49324323677116559 7.1216216216 857 1026 NULL 857 true 1026.5769230769233 857.12164 1026.5769230769233 1969-12-31 16:14:17.121621621
-1886.32390852389240 -1197.69916839918480 0.8372079534582 259516.37432431944456816 8.3162162162 858 1028 NULL 858 true 1028.0076923076924 858.3162 1028.0076923076924 1969-12-31 16:14:18.316216216
-1887.63659043657700 -1198.53264033265400 0.8372063705322 259877.69189188782259834 8.9135135135 858 1028 NULL 858 true 1028.723076923077 858.9135 1028.723076923077 1969-12-31 16:14:18.913513513
-1895.51268191268460 -1203.53347193346920 0.8371969190171 262050.87567567649292835 2.4972972973 862 1033 NULL 862 true 1033.0153846153846 862.4973 1033.0153846153846 1969-12-31 16:14:22.497297297
-1909.95218295221550 -1212.70166320163100 0.8371797936946 266058.54729730725574014 9.0675675676 869 1040 NULL 869 true 1040.8846153846155 869.06757 1040.8846153846155 1969-12-31 16:14:29.067567567
-1913.89022869026920 -1215.20207900203840 0.8371751679996 267156.82702703945592392 0.8594594595 870 1043 NULL 870 true 1043.0307692307692 870.85944 1043.0307692307692 1969-12-31 16:14:30.859459459
+1836.44199584197700 -1166.02723492725400 0.8372697814834 245972.55810810255804469 5.6189189189 835 1000 NULL 835 true 1000.823076923077 835.6189 1000.823076923077 1970-01-01 00:13:55.618918918
+1856.13222453224620 -1178.52931392929240 0.8372449787014 251275.44324324968747899 4.5783783784 844 1011 NULL 844 true 1011.5538461538462 844.57837 1011.5538461538462 1970-01-01 00:14:04.578378378
+1858.75758835761550 -1180.19625779623100 0.8372417113669 251986.76756757564861519 5.7729729730 845 1012 NULL 845 true 1012.9846153846155 845.77295 1012.9846153846155 1970-01-01 00:14:05.772972973
+1862.69563409566930 -1182.69667359663860 0.8372368276345 253055.63918919969667286 7.5648648649 847 1015 NULL 847 true 1015.1307692307693 847.5649 1015.1307692307693 1970-01-01 00:14:07.564864864
+1883.69854469852330 -1196.03222453224660 0.8372111259286 258794.49324323677116559 7.1216216216 857 1026 NULL 857 true 1026.5769230769233 857.12164 1026.5769230769233 1970-01-01 00:14:17.121621621
+1886.32390852389240 -1197.69916839918480 0.8372079534582 259516.37432431944456816 8.3162162162 858 1028 NULL 858 true 1028.0076923076924 858.3162 1028.0076923076924 1970-01-01 00:14:18.316216216
+1887.63659043657700 -1198.53264033265400 0.8372063705322 259877.69189188782259834 8.9135135135 858 1028 NULL 858 true 1028.723076923077 858.9135 1028.723076923077 1970-01-01 00:14:18.913513513
+1895.51268191268460 -1203.53347193346920 0.8371969190171 262050.87567567649292835 2.4972972973 862 1033 NULL 862 true 1033.0153846153846 862.4973 1033.0153846153846 1970-01-01 00:14:22.497297297
+1909.95218295221550 -1212.70166320163100 0.8371797936946 266058.54729730725574014 9.0675675676 869 1040 NULL 869 true 1040.8846153846155 869.06757 1040.8846153846155 1970-01-01 00:14:29.067567567
+1913.89022869026920 -1215.20207900203840 0.8371751679996 267156.82702703945592392 0.8594594595 870 1043 NULL 870 true 1043.0307692307692 870.85944 1043.0307692307692 1970-01-01 00:14:30.859459459
PREHOOK: query: SELECT SUM(HASH(*))
FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_n1 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
@@ -280,16 +280,16 @@ LIMIT 10
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_test_small_n0
#### A masked pattern was here ####
-1836.439 -1166.021 0.83727243660 245971.826152056 5.619 835 1000 NULL 835 true 1000.82 835.619 1000.82 1969-12-31 16:13:55.619
-1856.128 -1178.522 0.83724778805 251274.375364068 4.578 844 1011 NULL 844 true 1011.55 844.578 1011.55 1969-12-31 16:14:04.578
-1858.753 -1180.187 0.83724555273 251985.627412262 5.773 845 1012 NULL 845 true 1012.98 845.773 1012.98 1969-12-31 16:14:05.773
-1862.695 -1182.695 0.83723759518 253055.487729555 7.565 847 1015 NULL 847 true 1015.13 847.565 1015.13 1969-12-31 16:14:07.565
-1883.702 -1196.038 0.83720898517 258795.383063868 7.122 857 1026 NULL 857 true 1026.58 857.122 1026.58 1969-12-31 16:14:17.122
-1886.326 -1197.704 0.83720586376 259516.891214712 8.316 858 1028 NULL 858 true 1028.01 858.316 1028.01 1969-12-31 16:14:18.316
-1887.634 -1198.526 0.83720934754 259877.061889284 8.914 858 1028 NULL 858 true 1028.72 858.914 1028.72 1969-12-31 16:14:18.914
-1895.517 -1203.543 0.83719289075 262051.956361764 2.497 862 1033 NULL 862 true 1033.02 862.497 1033.02 1969-12-31 16:14:22.497
-1909.948 -1212.692 0.83718392130 266057.499543968 9.068 869 1040 NULL 869 true 1040.88 869.068 1040.88 1969-12-31 16:14:29.068
-1913.889 -1215.201 0.83717534491 267156.488691411 0.859 870 1043 NULL 870 true 1043.03 870.859 1043.03 1969-12-31 16:14:30.859
+1836.439 -1166.021 0.83727243660 245971.826152056 5.619 835 1000 NULL 835 true 1000.82 835.619 1000.82 1970-01-01 00:13:55.619
+1856.128 -1178.522 0.83724778805 251274.375364068 4.578 844 1011 NULL 844 true 1011.55 844.578 1011.55 1970-01-01 00:14:04.578
+1858.753 -1180.187 0.83724555273 251985.627412262 5.773 845 1012 NULL 845 true 1012.98 845.773 1012.98 1970-01-01 00:14:05.773
+1862.695 -1182.695 0.83723759518 253055.487729555 7.565 847 1015 NULL 847 true 1015.13 847.565 1015.13 1970-01-01 00:14:07.565
+1883.702 -1196.038 0.83720898517 258795.383063868 7.122 857 1026 NULL 857 true 1026.58 857.122 1026.58 1970-01-01 00:14:17.122
+1886.326 -1197.704 0.83720586376 259516.891214712 8.316 858 1028 NULL 858 true 1028.01 858.316 1028.01 1970-01-01 00:14:18.316
+1887.634 -1198.526 0.83720934754 259877.061889284 8.914 858 1028 NULL 858 true 1028.72 858.914 1028.72 1970-01-01 00:14:18.914
+1895.517 -1203.543 0.83719289075 262051.956361764 2.497 862 1033 NULL 862 true 1033.02 862.497 1033.02 1970-01-01 00:14:22.497
+1909.948 -1212.692 0.83718392130 266057.499543968 9.068 869 1040 NULL 869 true 1040.88 869.068 1040.88 1970-01-01 00:14:29.068
+1913.889 -1215.201 0.83717534491 267156.488691411 0.859 870 1043 NULL 870 true 1043.03 870.859 1043.03 1970-01-01 00:14:30.859
PREHOOK: query: SELECT SUM(HASH(*))
FROM (SELECT cdecimal1 + cdecimal2 as c1, cdecimal1 - (2*cdecimal2) as c2, ((cdecimal1+2.34)/cdecimal2) as c3, (cdecimal1 * (cdecimal2/3.4)) as c4, cdecimal1 % 10 as c5, CAST(cdecimal1 AS INT) as c6, CAST(cdecimal2 AS SMALLINT) as c7, CAST(cdecimal2 AS TINYINT) as c8, CAST(cdecimal1 AS BIGINT) as c9, CAST (cdecimal1 AS BOOLEAN) as c10, CAST(cdecimal2 AS DOUBLE) as c11, CAST(cdecimal1 AS FLOAT) as c12, CAST(cdecimal2 AS STRING) as c13, CAST(cdecimal1 AS TIMESTAMP) as c14 FROM decimal_test_small_n0 WHERE cdecimal1 > 0 AND cdecimal1 < 12345.5678 AND cdecimal2 != 0 AND cdecimal2 > 1000 AND cdouble IS NOT NULL
ORDER BY c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14) q
diff --git a/ql/src/test/results/clientpositive/vector_interval_1.q.out b/ql/src/test/results/clientpositive/vector_interval_1.q.out
index 8c0086e300..ddf5220537 100644
--- a/ql/src/test/results/clientpositive/vector_interval_1.q.out
+++ b/ql/src/test/results/clientpositive/vector_interval_1.q.out
@@ -80,7 +80,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -91,7 +91,7 @@ STAGE PLANS:
native: true
projectedOutputColumnNums: [2, 5, 6]
selectExpressions: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month, CastStringToIntervalDayTime(col 3:string) -> 6:interval_day_time
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
@@ -100,7 +100,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: interval_year_month), _col2 (type: interval_day_time)
Execution mode: vectorized
Map Vectorization:
@@ -120,10 +120,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: string), INTERVAL'1-2' (type: interval_year_month), VALUE._col0 (type: interval_year_month), INTERVAL'1 02:03:04.000000000' (type: interval_day_time), VALUE._col1 (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -191,7 +191,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -202,7 +202,7 @@ STAGE PLANS:
native: true
projectedOutputColumnNums: [1, 7, 6, 9, 8]
selectExpressions: IntervalYearMonthColAddIntervalYearMonthColumn(col 5:interval_year_month, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month, CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 7:interval_year_month, IntervalYearMonthScalarAddIntervalYearMonthColumn(val 14, col 5:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month) -> 6:interval_year_month, IntervalYearMonthColSubtractIntervalYearMonthColumn(col 5:interval_year_month, col 8:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month, CastStringToIntervalYearMonth(col 2:string) -> 8:interval_year_month) -> 9:interval_year_month, IntervalYearMonthScalarSubtractIntervalYearMonthColumn(val 14, col 5:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 5:interval_year_month) -> 8:interval_year_month
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
sort order: +
@@ -211,7 +211,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: interval_year_month), _col2 (type: interval_year_month), _col3 (type: interval_year_month), _col4 (type: interval_year_month)
Execution mode: vectorized
Map Vectorization:
@@ -231,10 +231,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: date), INTERVAL'2-4' (type: interval_year_month), VALUE._col0 (type: interval_year_month), VALUE._col1 (type: interval_year_month), INTERVAL'0-0' (type: interval_year_month), VALUE._col2 (type: interval_year_month), VALUE._col3 (type: interval_year_month)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -310,7 +310,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -321,7 +321,7 @@ STAGE PLANS:
native: true
projectedOutputColumnNums: [1, 7, 6, 9, 8]
selectExpressions: IntervalDayTimeColAddIntervalDayTimeColumn(col 5:interval_day_time, col 6:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time, CastStringToIntervalDayTime(col 3:string) -> 6:interval_day_time) -> 7:interval_day_time, IntervalDayTimeScalarAddIntervalDayTimeColumn(val 1 02:03:04.000000000, col 5:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time) -> 6:interval_day_time, IntervalDayTimeColSubtractIntervalDayTimeColumn(col 5:interval_day_time, col 8:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time, CastStringToIntervalDayTime(col 3:string) -> 8:interval_day_time) -> 9:interval_day_time, IntervalDayTimeScalarSubtractIntervalDayTimeColumn(val 1 02:03:04.000000000, col 5:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 5:interval_day_time) -> 8:interval_day_time
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
sort order: +
@@ -330,7 +330,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time)
Execution mode: vectorized
Map Vectorization:
@@ -350,10 +350,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: date), INTERVAL'2 04:06:08.000000000' (type: interval_day_time), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), INTERVAL'0 00:00:00.000000000' (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -441,7 +441,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -452,7 +452,7 @@ STAGE PLANS:
native: true
projectedOutputColumnNums: [1, 5, 7, 6, 9, 8, 11, 12, 14, 15, 16, 17, 18]
selectExpressions: DateColAddIntervalYearMonthScalar(col 1:date, val 1-2) -> 5:date, DateColAddIntervalYearMonthColumn(col 1:date, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 7:date, IntervalYearMonthScalarAddDateColumn(val 1-2, col 1:interval_year_month) -> 6:date, IntervalYearMonthColAddDateColumn(col 8:interval_year_month, col 1:date)(children: CastStringToIntervalYearMonth(col 2:string) -> 8:interval_year_month) -> 9:date, DateColSubtractIntervalYearMonthScalar(col 1:date, val 1-2) -> 8:date, DateColSubtractIntervalYearMonthColumn(col 1:date, col 10:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 10:interval_year_month) -> 11:date, DateColAddIntervalDayTimeScalar(col 1:date, val 1 02:03:04.000000000) -> 12:timestamp, DateColAddIntervalDayTimeColumn(col 1:date, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 14:timestamp, IntervalDayTimeScalarAddDateColumn(val 1 02:03:04.000000000, col 1:date) -> 15:timestamp, IntervalDayTimeColAddDateColumn(col 13:interval_day_time, col 1:date)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 16:timestamp, DateColSubtractIntervalDayTimeScalar(col 1:date, val 1 02:03:04.000000000) -> 17:timestamp, DateColSubtractIntervalDayTimeColumn(col 1:date, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 18:timestamp
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
sort order: +
@@ -461,7 +461,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp)
Execution mode: vectorized
Map Vectorization:
@@ -481,10 +481,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: date), VALUE._col2 (type: date), VALUE._col3 (type: date), VALUE._col4 (type: date), VALUE._col5 (type: date), VALUE._col6 (type: timestamp), VALUE._col7 (type: timestamp), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -584,7 +584,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -595,7 +595,7 @@ STAGE PLANS:
native: true
projectedOutputColumnNums: [0, 5, 7, 8, 9, 10, 11, 12, 14, 15, 16, 17, 18]
selectExpressions: TimestampColAddIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 5:timestamp, TimestampColAddIntervalYearMonthColumn(col 0:timestamp, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 7:timestamp, IntervalYearMonthScalarAddTimestampColumn(val 1-2, col 0:interval_year_month) -> 8:timestamp, IntervalYearMonthColAddTimestampColumn(col 6:interval_year_month, col 0:timestamp)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 9:timestamp, TimestampColSubtractIntervalYearMonthScalar(col 0:timestamp, val 1-2) -> 10:timestamp, TimestampColSubtractIntervalYearMonthColumn(col 0:timestamp, col 6:interval_year_month)(children: CastStringToIntervalYearMonth(col 2:string) -> 6:interval_year_month) -> 11:timestamp, TimestampColAddIntervalDayTimeScalar(col 0:timestamp, val 1 02:03:04.000000000) -> 12:timestamp, TimestampColAddIntervalDayTimeColumn(col 0:timestamp, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 14:timestamp, IntervalDayTimeScalarAddTimestampColumn(val 1 02:03:04.000000000, col 0:timestamp) -> 15:timestamp, IntervalDayTimeColAddTimestampColumn(col 13:interval_day_time, col 0:timestamp)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 16:timestamp, TimestampColSubtractIntervalDayTimeScalar(col 0:timestamp, val 1 02:03:04.000000000) -> 17:timestamp, TimestampColSubtractIntervalDayTimeColumn(col 0:timestamp, col 13:interval_day_time)(children: CastStringToIntervalDayTime(col 3:string) -> 13:interval_day_time) -> 18:timestamp
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: timestamp)
sort order: +
@@ -604,7 +604,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp)
Execution mode: vectorized
Map Vectorization:
@@ -624,10 +624,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp), VALUE._col6 (type: timestamp), VALUE._col7 (type: timestamp), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -709,18 +709,18 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
- expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03.0' - ts) (type: interval_day_time), (ts - TIMESTAMP'2001-01-01 01:02:03.0') (type: interval_day_time)
+ expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03' - ts) (type: interval_day_time), (ts - TIMESTAMP'2001-01-01 01:02:03') (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3
Select Vectorization:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [0, 5, 6, 7]
- selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2001-01-01 01:02:03.0, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2001-01-01 01:02:03.0) -> 7:interval_day_time
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ selectExpressions: TimestampColSubtractTimestampColumn(col 0:timestamp, col 0:timestamp) -> 5:interval_day_time, TimestampScalarSubtractTimestampColumn(val 2000-12-31 17:02:03.0, col 0:timestamp) -> 6:interval_day_time, TimestampColSubtractTimestampScalar(col 0:timestamp, val 2000-12-31 17:02:03.0) -> 7:interval_day_time
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: timestamp)
sort order: +
@@ -729,7 +729,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time)
Execution mode: vectorized
Map Vectorization:
@@ -749,10 +749,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -816,7 +816,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -826,8 +826,8 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [1, 5, 6, 7]
- selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2001-01-01 00:00:00.0, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2001-01-01 00:00:00.0) -> 7:interval_day_time
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ selectExpressions: DateColSubtractDateColumn(col 1:date, col 1:date) -> 5:interval_day_time, DateScalarSubtractDateColumn(val 2000-12-31 16:00:00.0, col 1:date) -> 6:interval_day_time, DateColSubtractDateScalar(col 1:date, val 2000-12-31 16:00:00.0) -> 7:interval_day_time
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
sort order: +
@@ -836,7 +836,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time)
Execution mode: vectorized
Map Vectorization:
@@ -856,10 +856,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -929,18 +929,18 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: vector_interval_1
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
- expressions: dt (type: date), (ts - dt) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03.0' - dt) (type: interval_day_time), (ts - DATE'2001-01-01') (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - TIMESTAMP'2001-01-01 01:02:03.0') (type: interval_day_time), (DATE'2001-01-01' - ts) (type: interval_day_time)
+ expressions: dt (type: date), (ts - dt) (type: interval_day_time), (TIMESTAMP'2001-01-01 01:02:03' - dt) (type: interval_day_time), (ts - DATE'2001-01-01') (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - TIMESTAMP'2001-01-01 01:02:03') (type: interval_day_time), (DATE'2001-01-01' - ts) (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Select Vectorization:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [1, 5, 6, 7, 8, 9, 10]
- selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2001-01-01 01:02:03.0, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2001-01-01 00:00:00.0) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2001-01-01 01:02:03.0) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2001-01-01 00:00:00.0, col 0:timestamp) -> 10:interval_day_time
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ selectExpressions: TimestampColSubtractDateColumn(col 0:timestamp, col 1:date) -> 5:interval_day_time, TimestampScalarSubtractDateColumn(val 2000-12-31 17:02:03.0, col 1:date) -> 6:interval_day_time, TimestampColSubtractDateScalar(col 0:timestamp, val 2000-12-31 16:00:00.0) -> 7:interval_day_time, DateColSubtractTimestampColumn(col 1:date, col 0:timestamp) -> 8:interval_day_time, DateColSubtractTimestampScalar(col 1:date, val 2000-12-31 17:02:03.0) -> 9:interval_day_time, DateScalarSubtractTimestampColumn(val 2000-12-31 16:00:00.0, col 0:timestamp) -> 10:interval_day_time
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
sort order: +
@@ -949,7 +949,7 @@ STAGE PLANS:
native: false
nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time), _col5 (type: interval_day_time), _col6 (type: interval_day_time)
Execution mode: vectorized
Map Vectorization:
@@ -969,10 +969,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time), VALUE._col4 (type: interval_day_time), VALUE._col5 (type: interval_day_time)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 274 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
index 1547942d58..b4f3a8d6d5 100644
--- a/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
+++ b/ql/src/test/results/clientpositive/vector_interval_arithmetic.q.out
@@ -158,7 +158,7 @@ POSTHOOK: Input: default@interval_arithmetic_1
dateval _c1 _c2 _c3 _c4 _c5 _c6
0004-09-22 0002-07-22 0006-11-22 0006-11-22 0002-07-22 0002-07-22 0006-11-22
0528-10-27 0526-08-27 0530-12-27 0530-12-27 0526-08-27 0526-08-27 0530-12-27
-1319-02-02 1316-12-02 1321-04-02 1321-04-02 1316-12-02 1316-12-02 1321-04-02
+1319-02-02 1316-12-03 1321-04-02 1321-04-02 1316-12-03 1316-12-03 1321-04-02
1404-07-23 1402-05-23 1406-09-23 1406-09-23 1402-05-23 1402-05-23 1406-09-23
1815-05-06 1813-03-06 1817-07-06 1817-07-06 1813-03-06 1813-03-06 1817-07-06
1883-04-17 1881-02-17 1885-06-17 1885-06-17 1881-02-17 1881-02-17 1885-06-17
@@ -249,7 +249,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [0, 3, 4, 5]
- selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07 00:00:00.0) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07 00:00:00.0, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
+ selectExpressions: DateColSubtractDateScalar(col 0:date, val 1999-06-07) -> 3:interval_day_time, DateScalarSubtractDateColumn(val 1999-06-07, col 0:date) -> 4:interval_day_time, DateColSubtractDateColumn(col 0:date, col 0:date) -> 5:interval_day_time
Statistics: Num rows: 50 Data size: 4800 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: date)
@@ -315,56 +315,56 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@interval_arithmetic_1
#### A masked pattern was here ####
dateval _c1 _c2 _c3
-0004-09-22 -728552 23:00:00.000000000 728552 23:00:00.000000000 0 00:00:00.000000000
-0528-10-27 -537126 23:00:00.000000000 537126 23:00:00.000000000 0 00:00:00.000000000
-1319-02-02 -248481 23:00:00.000000000 248481 23:00:00.000000000 0 00:00:00.000000000
-1404-07-23 -217263 23:00:00.000000000 217263 23:00:00.000000000 0 00:00:00.000000000
-1815-05-06 -67236 23:00:00.000000000 67236 23:00:00.000000000 0 00:00:00.000000000
-1883-04-17 -42418 23:00:00.000000000 42418 23:00:00.000000000 0 00:00:00.000000000
+0004-09-22 -728551 00:00:00.000000000 728551 00:00:00.000000000 0 00:00:00.000000000
+0528-10-27 -537129 00:00:00.000000000 537129 00:00:00.000000000 0 00:00:00.000000000
+1319-02-02 -248490 00:00:00.000000000 248490 00:00:00.000000000 0 00:00:00.000000000
+1404-07-23 -217273 00:00:00.000000000 217273 00:00:00.000000000 0 00:00:00.000000000
+1815-05-06 -67237 00:00:00.000000000 67237 00:00:00.000000000 0 00:00:00.000000000
+1883-04-17 -42419 00:00:00.000000000 42419 00:00:00.000000000 0 00:00:00.000000000
1966-08-16 -11983 00:00:00.000000000 11983 00:00:00.000000000 0 00:00:00.000000000
-1973-04-17 -9546 23:00:00.000000000 9546 23:00:00.000000000 0 00:00:00.000000000
+1973-04-17 -9547 00:00:00.000000000 9547 00:00:00.000000000 0 00:00:00.000000000
1974-10-04 -9012 00:00:00.000000000 9012 00:00:00.000000000 0 00:00:00.000000000
-1976-03-03 -8495 23:00:00.000000000 8495 23:00:00.000000000 0 00:00:00.000000000
+1976-03-03 -8496 00:00:00.000000000 8496 00:00:00.000000000 0 00:00:00.000000000
1976-05-06 -8432 00:00:00.000000000 8432 00:00:00.000000000 0 00:00:00.000000000
1978-08-05 -7611 00:00:00.000000000 7611 00:00:00.000000000 0 00:00:00.000000000
-1981-04-25 -6616 23:00:00.000000000 6616 23:00:00.000000000 0 00:00:00.000000000
-1981-11-15 -6412 23:00:00.000000000 6412 23:00:00.000000000 0 00:00:00.000000000
+1981-04-25 -6617 00:00:00.000000000 6617 00:00:00.000000000 0 00:00:00.000000000
+1981-11-15 -6413 00:00:00.000000000 6413 00:00:00.000000000 0 00:00:00.000000000
1985-07-20 -5070 00:00:00.000000000 5070 00:00:00.000000000 0 00:00:00.000000000
-1985-11-18 -4948 23:00:00.000000000 4948 23:00:00.000000000 0 00:00:00.000000000
-1987-02-21 -4488 23:00:00.000000000 4488 23:00:00.000000000 0 00:00:00.000000000
+1985-11-18 -4949 00:00:00.000000000 4949 00:00:00.000000000 0 00:00:00.000000000
+1987-02-21 -4489 00:00:00.000000000 4489 00:00:00.000000000 0 00:00:00.000000000
1987-05-28 -4393 00:00:00.000000000 4393 00:00:00.000000000 0 00:00:00.000000000
1998-10-16 -234 00:00:00.000000000 234 00:00:00.000000000 0 00:00:00.000000000
1999-10-03 118 00:00:00.000000000 -118 00:00:00.000000000 0 00:00:00.000000000
-2000-12-18 560 01:00:00.000000000 -560 01:00:00.000000000 0 00:00:00.000000000
+2000-12-18 560 00:00:00.000000000 -560 00:00:00.000000000 0 00:00:00.000000000
2002-05-10 1068 00:00:00.000000000 -1068 00:00:00.000000000 0 00:00:00.000000000
2003-09-23 1569 00:00:00.000000000 -1569 00:00:00.000000000 0 00:00:00.000000000
-2004-03-07 1735 01:00:00.000000000 -1735 01:00:00.000000000 0 00:00:00.000000000
-2007-02-09 2804 01:00:00.000000000 -2804 01:00:00.000000000 0 00:00:00.000000000
-2009-01-21 3516 01:00:00.000000000 -3516 01:00:00.000000000 0 00:00:00.000000000
+2004-03-07 1735 00:00:00.000000000 -1735 00:00:00.000000000 0 00:00:00.000000000
+2007-02-09 2804 00:00:00.000000000 -2804 00:00:00.000000000 0 00:00:00.000000000
+2009-01-21 3516 00:00:00.000000000 -3516 00:00:00.000000000 0 00:00:00.000000000
2010-04-08 3958 00:00:00.000000000 -3958 00:00:00.000000000 0 00:00:00.000000000
2013-04-07 5053 00:00:00.000000000 -5053 00:00:00.000000000 0 00:00:00.000000000
2013-04-10 5056 00:00:00.000000000 -5056 00:00:00.000000000 0 00:00:00.000000000
2021-09-24 8145 00:00:00.000000000 -8145 00:00:00.000000000 0 00:00:00.000000000
-2024-11-11 9289 01:00:00.000000000 -9289 01:00:00.000000000 0 00:00:00.000000000
+2024-11-11 9289 00:00:00.000000000 -9289 00:00:00.000000000 0 00:00:00.000000000
4143-07-08 783111 00:00:00.000000000 -783111 00:00:00.000000000 0 00:00:00.000000000
-4966-12-04 1083855 01:00:00.000000000 -1083855 01:00:00.000000000 0 00:00:00.000000000
-5339-02-01 1219784 01:00:00.000000000 -1219784 01:00:00.000000000 0 00:00:00.000000000
+4966-12-04 1083855 00:00:00.000000000 -1083855 00:00:00.000000000 0 00:00:00.000000000
+5339-02-01 1219784 00:00:00.000000000 -1219784 00:00:00.000000000 0 00:00:00.000000000
5344-10-04 1221856 00:00:00.000000000 -1221856 00:00:00.000000000 0 00:00:00.000000000
5397-07-13 1241131 00:00:00.000000000 -1241131 00:00:00.000000000 0 00:00:00.000000000
5966-07-09 1448949 00:00:00.000000000 -1448949 00:00:00.000000000 0 00:00:00.000000000
6229-06-28 1544997 00:00:00.000000000 -1544997 00:00:00.000000000 0 00:00:00.000000000
6482-04-27 1637342 00:00:00.000000000 -1637342 00:00:00.000000000 0 00:00:00.000000000
-6631-11-13 1691962 01:00:00.000000000 -1691962 01:00:00.000000000 0 00:00:00.000000000
+6631-11-13 1691962 00:00:00.000000000 -1691962 00:00:00.000000000 0 00:00:00.000000000
6705-09-28 1718944 00:00:00.000000000 -1718944 00:00:00.000000000 0 00:00:00.000000000
-6731-02-12 1728212 01:00:00.000000000 -1728212 01:00:00.000000000 0 00:00:00.000000000
-7160-12-02 1885195 01:00:00.000000000 -1885195 01:00:00.000000000 0 00:00:00.000000000
+6731-02-12 1728212 00:00:00.000000000 -1728212 00:00:00.000000000 0 00:00:00.000000000
+7160-12-02 1885195 00:00:00.000000000 -1885195 00:00:00.000000000 0 00:00:00.000000000
7409-09-07 1976054 00:00:00.000000000 -1976054 00:00:00.000000000 0 00:00:00.000000000
7503-06-23 2010310 00:00:00.000000000 -2010310 00:00:00.000000000 0 00:00:00.000000000
8422-07-22 2345998 00:00:00.000000000 -2345998 00:00:00.000000000 0 00:00:00.000000000
-8521-01-16 2381970 01:00:00.000000000 -2381970 01:00:00.000000000 0 00:00:00.000000000
+8521-01-16 2381970 00:00:00.000000000 -2381970 00:00:00.000000000 0 00:00:00.000000000
9075-06-13 2584462 00:00:00.000000000 -2584462 00:00:00.000000000 0 00:00:00.000000000
-9209-11-11 2633556 01:00:00.000000000 -2633556 01:00:00.000000000 0 00:00:00.000000000
-9403-01-09 2704106 01:00:00.000000000 -2704106 01:00:00.000000000 0 00:00:00.000000000
+9209-11-11 2633556 00:00:00.000000000 -2633556 00:00:00.000000000 0 00:00:00.000000000
+9403-01-09 2704106 00:00:00.000000000 -2704106 00:00:00.000000000 0 00:00:00.000000000
PREHOOK: query: explain vectorization expression
select
tsval,
@@ -488,53 +488,53 @@ POSTHOOK: Input: default@interval_arithmetic_1
tsval _c1 _c2 _c3 _c4 _c5 _c6
0004-09-22 18:26:29.519542222 0002-07-22 18:26:29.519542222 0006-11-22 18:26:29.519542222 0006-11-22 18:26:29.519542222 0002-07-22 18:26:29.519542222 0002-07-22 18:26:29.519542222 0006-11-22 18:26:29.519542222
0528-10-27 08:15:18.941718273 0526-08-27 08:15:18.941718273 0530-12-27 08:15:18.941718273 0530-12-27 08:15:18.941718273 0526-08-27 08:15:18.941718273 0526-08-27 08:15:18.941718273 0530-12-27 08:15:18.941718273
-1319-02-02 16:31:57.778 1316-12-02 16:31:57.778 1321-04-02 16:31:57.778 1321-04-02 16:31:57.778 1316-12-02 16:31:57.778 1316-12-02 16:31:57.778 1321-04-02 16:31:57.778
+1319-02-02 16:31:57.778 1316-12-03 16:31:57.778 1321-04-02 16:31:57.778 1321-04-02 16:31:57.778 1316-12-03 16:31:57.778 1316-12-03 16:31:57.778 1321-04-02 16:31:57.778
1404-07-23 15:32:16.059185026 1402-05-23 15:32:16.059185026 1406-09-23 15:32:16.059185026 1406-09-23 15:32:16.059185026 1402-05-23 15:32:16.059185026 1402-05-23 15:32:16.059185026 1406-09-23 15:32:16.059185026
1815-05-06 00:12:37.543584705 1813-03-06 00:12:37.543584705 1817-07-06 00:12:37.543584705 1817-07-06 00:12:37.543584705 1813-03-06 00:12:37.543584705 1813-03-06 00:12:37.543584705 1817-07-06 00:12:37.543584705
1883-04-17 04:14:34.647766229 1881-02-17 04:14:34.647766229 1885-06-17 04:14:34.647766229 1885-06-17 04:14:34.647766229 1881-02-17 04:14:34.647766229 1881-02-17 04:14:34.647766229 1885-06-17 04:14:34.647766229
1966-08-16 13:36:50.183618031 1964-06-16 13:36:50.183618031 1968-10-16 13:36:50.183618031 1968-10-16 13:36:50.183618031 1964-06-16 13:36:50.183618031 1964-06-16 13:36:50.183618031 1968-10-16 13:36:50.183618031
-1973-04-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 07:30:38.596784156 1975-06-17 07:30:38.596784156 1971-02-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 07:30:38.596784156
-1974-10-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 16:21:03.989 1976-12-04 16:21:03.989 1972-08-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 16:21:03.989
-1976-03-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 05:54:33.000895162 1978-05-03 05:54:33.000895162 1974-01-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 05:54:33.000895162
+1973-04-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 06:30:38.596784156 1975-06-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1971-02-17 06:30:38.596784156 1975-06-17 06:30:38.596784156
+1974-10-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 17:21:03.989 1976-12-04 17:21:03.989 1972-08-04 17:21:03.989 1972-08-04 17:21:03.989 1976-12-04 17:21:03.989
+1976-03-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 04:54:33.000895162 1978-05-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1974-01-03 04:54:33.000895162 1978-05-03 04:54:33.000895162
1976-05-06 00:42:30.910786948 1974-03-06 00:42:30.910786948 1978-07-06 00:42:30.910786948 1978-07-06 00:42:30.910786948 1974-03-06 00:42:30.910786948 1974-03-06 00:42:30.910786948 1978-07-06 00:42:30.910786948
1978-08-05 14:41:05.501 1976-06-05 14:41:05.501 1980-10-05 14:41:05.501 1980-10-05 14:41:05.501 1976-06-05 14:41:05.501 1976-06-05 14:41:05.501 1980-10-05 14:41:05.501
-1981-04-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 10:01:12.077192689 1983-06-25 10:01:12.077192689 1979-02-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 10:01:12.077192689
-1981-11-15 23:03:10.999338387 1979-09-16 00:03:10.999338387 1984-01-15 23:03:10.999338387 1984-01-15 23:03:10.999338387 1979-09-16 00:03:10.999338387 1979-09-16 00:03:10.999338387 1984-01-15 23:03:10.999338387
+1981-04-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 09:01:12.077192689 1983-06-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1979-02-25 09:01:12.077192689 1983-06-25 09:01:12.077192689
+1981-11-15 23:03:10.999338387 1979-09-15 23:03:10.999338387 1984-01-15 23:03:10.999338387 1984-01-15 23:03:10.999338387 1979-09-15 23:03:10.999338387 1979-09-15 23:03:10.999338387 1984-01-15 23:03:10.999338387
1985-07-20 09:30:11 1983-05-20 09:30:11 1987-09-20 09:30:11 1987-09-20 09:30:11 1983-05-20 09:30:11 1983-05-20 09:30:11 1987-09-20 09:30:11
-1985-11-18 16:37:54 1983-09-18 17:37:54 1988-01-18 16:37:54 1988-01-18 16:37:54 1983-09-18 17:37:54 1983-09-18 17:37:54 1988-01-18 16:37:54
-1987-02-21 19:48:29 1984-12-21 19:48:29 1989-04-21 20:48:29 1989-04-21 20:48:29 1984-12-21 19:48:29 1984-12-21 19:48:29 1989-04-21 20:48:29
-1987-05-28 13:52:07.900916635 1985-03-28 12:52:07.900916635 1989-07-28 13:52:07.900916635 1989-07-28 13:52:07.900916635 1985-03-28 12:52:07.900916635 1985-03-28 12:52:07.900916635 1989-07-28 13:52:07.900916635
-1998-10-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 19:05:29.397591987 2000-12-16 19:05:29.397591987 1996-08-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 19:05:29.397591987
-1999-10-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 15:59:10.396903939 2001-12-03 15:59:10.396903939 1997-08-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 15:59:10.396903939
-2000-12-18 08:42:30.000595596 1998-10-18 09:42:30.000595596 2003-02-18 08:42:30.000595596 2003-02-18 08:42:30.000595596 1998-10-18 09:42:30.000595596 1998-10-18 09:42:30.000595596 2003-02-18 08:42:30.000595596
-2002-05-10 05:29:48.990818073 2000-03-10 04:29:48.990818073 2004-07-10 05:29:48.990818073 2004-07-10 05:29:48.990818073 2000-03-10 04:29:48.990818073 2000-03-10 04:29:48.990818073 2004-07-10 05:29:48.990818073
-2003-09-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 21:33:17.00003252 2005-11-23 21:33:17.00003252 2001-07-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 21:33:17.00003252
-2004-03-07 20:14:13 2002-01-07 20:14:13 2006-05-07 21:14:13 2006-05-07 21:14:13 2002-01-07 20:14:13 2002-01-07 20:14:13 2006-05-07 21:14:13
-2007-02-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 06:17:29.368756876 2009-04-09 06:17:29.368756876 2004-12-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 06:17:29.368756876
-2009-01-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 11:49:07.108 2011-03-21 11:49:07.108 2006-11-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 11:49:07.108
-2010-04-08 02:43:35.861742727 2008-02-08 01:43:35.861742727 2012-06-08 02:43:35.861742727 2012-06-08 02:43:35.861742727 2008-02-08 01:43:35.861742727 2008-02-08 01:43:35.861742727 2012-06-08 02:43:35.861742727
-2013-04-07 02:44:43.00086821 2011-02-07 01:44:43.00086821 2015-06-07 02:44:43.00086821 2015-06-07 02:44:43.00086821 2011-02-07 01:44:43.00086821 2011-02-07 01:44:43.00086821 2015-06-07 02:44:43.00086821
-2013-04-10 00:43:46.854731546 2011-02-09 23:43:46.854731546 2015-06-10 00:43:46.854731546 2015-06-10 00:43:46.854731546 2011-02-09 23:43:46.854731546 2011-02-09 23:43:46.854731546 2015-06-10 00:43:46.854731546
-2021-09-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 02:18:32.413655165 2023-11-24 02:18:32.413655165 2019-07-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 02:18:32.413655165
-2024-11-11 16:42:41.101 2022-09-11 17:42:41.101 2027-01-11 16:42:41.101 2027-01-11 16:42:41.101 2022-09-11 17:42:41.101 2022-09-11 17:42:41.101 2027-01-11 16:42:41.101
+1985-11-18 16:37:54 1983-09-18 16:37:54 1988-01-18 16:37:54 1988-01-18 16:37:54 1983-09-18 16:37:54 1983-09-18 16:37:54 1988-01-18 16:37:54
+1987-02-21 19:48:29 1984-12-21 19:48:29 1989-04-21 19:48:29 1989-04-21 19:48:29 1984-12-21 19:48:29 1984-12-21 19:48:29 1989-04-21 19:48:29
+1987-05-28 13:52:07.900916635 1985-03-28 13:52:07.900916635 1989-07-28 13:52:07.900916635 1989-07-28 13:52:07.900916635 1985-03-28 13:52:07.900916635 1985-03-28 13:52:07.900916635 1989-07-28 13:52:07.900916635
+1998-10-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 20:05:29.397591987 2000-12-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 1996-08-16 20:05:29.397591987 2000-12-16 20:05:29.397591987
+1999-10-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 16:59:10.396903939 2001-12-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 1997-08-03 16:59:10.396903939 2001-12-03 16:59:10.396903939
+2000-12-18 08:42:30.000595596 1998-10-18 08:42:30.000595596 2003-02-18 08:42:30.000595596 2003-02-18 08:42:30.000595596 1998-10-18 08:42:30.000595596 1998-10-18 08:42:30.000595596 2003-02-18 08:42:30.000595596
+2002-05-10 05:29:48.990818073 2000-03-10 05:29:48.990818073 2004-07-10 05:29:48.990818073 2004-07-10 05:29:48.990818073 2000-03-10 05:29:48.990818073 2000-03-10 05:29:48.990818073 2004-07-10 05:29:48.990818073
+2003-09-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 22:33:17.00003252 2005-11-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2001-07-23 22:33:17.00003252 2005-11-23 22:33:17.00003252
+2004-03-07 20:14:13 2002-01-07 20:14:13 2006-05-07 20:14:13 2006-05-07 20:14:13 2002-01-07 20:14:13 2002-01-07 20:14:13 2006-05-07 20:14:13
+2007-02-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 05:17:29.368756876 2009-04-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2004-12-09 05:17:29.368756876 2009-04-09 05:17:29.368756876
+2009-01-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 10:49:07.108 2011-03-21 10:49:07.108 2006-11-21 10:49:07.108 2006-11-21 10:49:07.108 2011-03-21 10:49:07.108
+2010-04-08 02:43:35.861742727 2008-02-08 02:43:35.861742727 2012-06-08 02:43:35.861742727 2012-06-08 02:43:35.861742727 2008-02-08 02:43:35.861742727 2008-02-08 02:43:35.861742727 2012-06-08 02:43:35.861742727
+2013-04-07 02:44:43.00086821 2011-02-07 02:44:43.00086821 2015-06-07 02:44:43.00086821 2015-06-07 02:44:43.00086821 2011-02-07 02:44:43.00086821 2011-02-07 02:44:43.00086821 2015-06-07 02:44:43.00086821
+2013-04-10 00:43:46.854731546 2011-02-10 00:43:46.854731546 2015-06-10 00:43:46.854731546 2015-06-10 00:43:46.854731546 2011-02-10 00:43:46.854731546 2011-02-10 00:43:46.854731546 2015-06-10 00:43:46.854731546
+2021-09-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 03:18:32.413655165 2023-11-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2019-07-24 03:18:32.413655165 2023-11-24 03:18:32.413655165
+2024-11-11 16:42:41.101 2022-09-11 16:42:41.101 2027-01-11 16:42:41.101 2027-01-11 16:42:41.101 2022-09-11 16:42:41.101 2022-09-11 16:42:41.101 2027-01-11 16:42:41.101
4143-07-08 10:53:27.252802259 4141-05-08 10:53:27.252802259 4145-09-08 10:53:27.252802259 4145-09-08 10:53:27.252802259 4141-05-08 10:53:27.252802259 4141-05-08 10:53:27.252802259 4145-09-08 10:53:27.252802259
-4966-12-04 09:30:55.202 4964-10-04 10:30:55.202 4969-02-04 09:30:55.202 4969-02-04 09:30:55.202 4964-10-04 10:30:55.202 4964-10-04 10:30:55.202 4969-02-04 09:30:55.202
-5339-02-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 15:10:01.085678691 5341-04-01 15:10:01.085678691 5336-12-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 15:10:01.085678691
-5344-10-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 17:40:08.165 5346-12-04 17:40:08.165 5342-08-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 17:40:08.165
+4966-12-04 09:30:55.202 4964-10-04 09:30:55.202 4969-02-04 09:30:55.202 4969-02-04 09:30:55.202 4964-10-04 09:30:55.202 4964-10-04 09:30:55.202 4969-02-04 09:30:55.202
+5339-02-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 14:10:01.085678691 5341-04-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5336-12-01 14:10:01.085678691 5341-04-01 14:10:01.085678691
+5344-10-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 18:40:08.165 5346-12-04 18:40:08.165 5342-08-04 18:40:08.165 5342-08-04 18:40:08.165 5346-12-04 18:40:08.165
5397-07-13 07:12:32.000896438 5395-05-13 07:12:32.000896438 5399-09-13 07:12:32.000896438 5399-09-13 07:12:32.000896438 5395-05-13 07:12:32.000896438 5395-05-13 07:12:32.000896438 5399-09-13 07:12:32.000896438
5966-07-09 03:30:50.597 5964-05-09 03:30:50.597 5968-09-09 03:30:50.597 5968-09-09 03:30:50.597 5964-05-09 03:30:50.597 5964-05-09 03:30:50.597 5968-09-09 03:30:50.597
6229-06-28 02:54:28.970117179 6227-04-28 02:54:28.970117179 6231-08-28 02:54:28.970117179 6231-08-28 02:54:28.970117179 6227-04-28 02:54:28.970117179 6227-04-28 02:54:28.970117179 6231-08-28 02:54:28.970117179
-6482-04-27 12:07:38.073915413 6480-02-27 11:07:38.073915413 6484-06-27 12:07:38.073915413 6484-06-27 12:07:38.073915413 6480-02-27 11:07:38.073915413 6480-02-27 11:07:38.073915413 6484-06-27 12:07:38.073915413
-6631-11-13 16:31:29.702202248 6629-09-13 17:31:29.702202248 6634-01-13 16:31:29.702202248 6634-01-13 16:31:29.702202248 6629-09-13 17:31:29.702202248 6629-09-13 17:31:29.702202248 6634-01-13 16:31:29.702202248
-6705-09-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 17:27:28.000845672 6707-11-28 17:27:28.000845672 6703-07-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 17:27:28.000845672
-6731-02-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 09:12:48.287783702 6733-04-12 09:12:48.287783702 6728-12-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 09:12:48.287783702
-7160-12-02 06:00:24.81200852 7158-10-02 07:00:24.81200852 7163-02-02 06:00:24.81200852 7163-02-02 06:00:24.81200852 7158-10-02 07:00:24.81200852 7158-10-02 07:00:24.81200852 7163-02-02 06:00:24.81200852
-7409-09-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 22:33:32.459349602 7411-11-07 22:33:32.459349602 7407-07-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 22:33:32.459349602
+6482-04-27 12:07:38.073915413 6480-02-27 12:07:38.073915413 6484-06-27 12:07:38.073915413 6484-06-27 12:07:38.073915413 6480-02-27 12:07:38.073915413 6480-02-27 12:07:38.073915413 6484-06-27 12:07:38.073915413
+6631-11-13 16:31:29.702202248 6629-09-13 16:31:29.702202248 6634-01-13 16:31:29.702202248 6634-01-13 16:31:29.702202248 6629-09-13 16:31:29.702202248 6629-09-13 16:31:29.702202248 6634-01-13 16:31:29.702202248
+6705-09-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 18:27:28.000845672 6707-11-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6703-07-28 18:27:28.000845672 6707-11-28 18:27:28.000845672
+6731-02-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 08:12:48.287783702 6733-04-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6728-12-12 08:12:48.287783702 6733-04-12 08:12:48.287783702
+7160-12-02 06:00:24.81200852 7158-10-02 06:00:24.81200852 7163-02-02 06:00:24.81200852 7163-02-02 06:00:24.81200852 7158-10-02 06:00:24.81200852 7158-10-02 06:00:24.81200852 7163-02-02 06:00:24.81200852
+7409-09-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 23:33:32.459349602 7411-11-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7407-07-07 23:33:32.459349602 7411-11-07 23:33:32.459349602
7503-06-23 23:14:17.486 7501-04-23 23:14:17.486 7505-08-23 23:14:17.486 7505-08-23 23:14:17.486 7501-04-23 23:14:17.486 7501-04-23 23:14:17.486 7505-08-23 23:14:17.486
8422-07-22 03:21:45.745036084 8420-05-22 03:21:45.745036084 8424-09-22 03:21:45.745036084 8424-09-22 03:21:45.745036084 8420-05-22 03:21:45.745036084 8420-05-22 03:21:45.745036084 8424-09-22 03:21:45.745036084
-8521-01-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 21:42:05.668832388 8523-03-16 21:42:05.668832388 8518-11-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 21:42:05.668832388
+8521-01-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 20:42:05.668832388 8523-03-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8518-11-16 20:42:05.668832388 8523-03-16 20:42:05.668832388
9075-06-13 16:20:09.218517797 9073-04-13 16:20:09.218517797 9077-08-13 16:20:09.218517797 9077-08-13 16:20:09.218517797 9073-04-13 16:20:09.218517797 9073-04-13 16:20:09.218517797 9077-08-13 16:20:09.218517797
-9209-11-11 04:08:58.223768453 9207-09-11 05:08:58.223768453 9212-01-11 04:08:58.223768453 9212-01-11 04:08:58.223768453 9207-09-11 05:08:58.223768453 9207-09-11 05:08:58.223768453 9212-01-11 04:08:58.223768453
+9209-11-11 04:08:58.223768453 9207-09-11 04:08:58.223768453 9212-01-11 04:08:58.223768453 9212-01-11 04:08:58.223768453 9207-09-11 04:08:58.223768453 9207-09-11 04:08:58.223768453 9212-01-11 04:08:58.223768453
9403-01-09 18:12:33.547 9400-11-09 18:12:33.547 9405-03-09 18:12:33.547 9405-03-09 18:12:33.547 9400-11-09 18:12:33.547 9400-11-09 18:12:33.547 9405-03-09 18:12:33.547
PREHOOK: query: explain vectorization expression
select
@@ -760,50 +760,50 @@ dateval _c1 _c2 _c3 _c4 _c5 _c6
1404-07-23 1404-04-14 12:37:26.876543211 1404-10-30 11:22:33.123456789 1404-10-30 11:22:33.123456789 1404-04-14 12:37:26.876543211 1404-04-14 12:37:26.876543211 1404-10-30 11:22:33.123456789
1815-05-06 1815-01-26 12:37:26.876543211 1815-08-13 11:22:33.123456789 1815-08-13 11:22:33.123456789 1815-01-26 12:37:26.876543211 1815-01-26 12:37:26.876543211 1815-08-13 11:22:33.123456789
1883-04-17 1883-01-07 12:37:26.876543211 1883-07-25 11:22:33.123456789 1883-07-25 11:22:33.123456789 1883-01-07 12:37:26.876543211 1883-01-07 12:37:26.876543211 1883-07-25 11:22:33.123456789
-1966-08-16 1966-05-08 12:37:26.876543211 1966-11-23 10:22:33.123456789 1966-11-23 10:22:33.123456789 1966-05-08 12:37:26.876543211 1966-05-08 12:37:26.876543211 1966-11-23 10:22:33.123456789
-1973-04-17 1973-01-07 12:37:26.876543211 1973-07-25 12:22:33.123456789 1973-07-25 12:22:33.123456789 1973-01-07 12:37:26.876543211 1973-01-07 12:37:26.876543211 1973-07-25 12:22:33.123456789
-1974-10-04 1974-06-26 12:37:26.876543211 1975-01-11 10:22:33.123456789 1975-01-11 10:22:33.123456789 1974-06-26 12:37:26.876543211 1974-06-26 12:37:26.876543211 1975-01-11 10:22:33.123456789
-1976-03-03 1975-11-24 12:37:26.876543211 1976-06-10 12:22:33.123456789 1976-06-10 12:22:33.123456789 1975-11-24 12:37:26.876543211 1975-11-24 12:37:26.876543211 1976-06-10 12:22:33.123456789
-1976-05-06 1976-01-27 11:37:26.876543211 1976-08-13 11:22:33.123456789 1976-08-13 11:22:33.123456789 1976-01-27 11:37:26.876543211 1976-01-27 11:37:26.876543211 1976-08-13 11:22:33.123456789
-1978-08-05 1978-04-27 11:37:26.876543211 1978-11-12 10:22:33.123456789 1978-11-12 10:22:33.123456789 1978-04-27 11:37:26.876543211 1978-04-27 11:37:26.876543211 1978-11-12 10:22:33.123456789
-1981-04-25 1981-01-15 12:37:26.876543211 1981-08-02 12:22:33.123456789 1981-08-02 12:22:33.123456789 1981-01-15 12:37:26.876543211 1981-01-15 12:37:26.876543211 1981-08-02 12:22:33.123456789
-1981-11-15 1981-08-07 13:37:26.876543211 1982-02-22 11:22:33.123456789 1982-02-22 11:22:33.123456789 1981-08-07 13:37:26.876543211 1981-08-07 13:37:26.876543211 1982-02-22 11:22:33.123456789
-1985-07-20 1985-04-11 11:37:26.876543211 1985-10-27 10:22:33.123456789 1985-10-27 10:22:33.123456789 1985-04-11 11:37:26.876543211 1985-04-11 11:37:26.876543211 1985-10-27 10:22:33.123456789
-1985-11-18 1985-08-10 13:37:26.876543211 1986-02-25 11:22:33.123456789 1986-02-25 11:22:33.123456789 1985-08-10 13:37:26.876543211 1985-08-10 13:37:26.876543211 1986-02-25 11:22:33.123456789
-1987-02-21 1986-11-13 12:37:26.876543211 1987-05-31 12:22:33.123456789 1987-05-31 12:22:33.123456789 1986-11-13 12:37:26.876543211 1986-11-13 12:37:26.876543211 1987-05-31 12:22:33.123456789
-1987-05-28 1987-02-17 11:37:26.876543211 1987-09-04 11:22:33.123456789 1987-09-04 11:22:33.123456789 1987-02-17 11:37:26.876543211 1987-02-17 11:37:26.876543211 1987-09-04 11:22:33.123456789
-1998-10-16 1998-07-08 12:37:26.876543211 1999-01-23 10:22:33.123456789 1999-01-23 10:22:33.123456789 1998-07-08 12:37:26.876543211 1998-07-08 12:37:26.876543211 1999-01-23 10:22:33.123456789
-1999-10-03 1999-06-25 12:37:26.876543211 2000-01-10 10:22:33.123456789 2000-01-10 10:22:33.123456789 1999-06-25 12:37:26.876543211 1999-06-25 12:37:26.876543211 2000-01-10 10:22:33.123456789
-2000-12-18 2000-09-09 13:37:26.876543211 2001-03-27 11:22:33.123456789 2001-03-27 11:22:33.123456789 2000-09-09 13:37:26.876543211 2000-09-09 13:37:26.876543211 2001-03-27 11:22:33.123456789
-2002-05-10 2002-01-30 11:37:26.876543211 2002-08-17 11:22:33.123456789 2002-08-17 11:22:33.123456789 2002-01-30 11:37:26.876543211 2002-01-30 11:37:26.876543211 2002-08-17 11:22:33.123456789
-2003-09-23 2003-06-15 12:37:26.876543211 2003-12-31 10:22:33.123456789 2003-12-31 10:22:33.123456789 2003-06-15 12:37:26.876543211 2003-06-15 12:37:26.876543211 2003-12-31 10:22:33.123456789
-2004-03-07 2003-11-28 12:37:26.876543211 2004-06-14 12:22:33.123456789 2004-06-14 12:22:33.123456789 2003-11-28 12:37:26.876543211 2003-11-28 12:37:26.876543211 2004-06-14 12:22:33.123456789
-2007-02-09 2006-11-01 12:37:26.876543211 2007-05-19 12:22:33.123456789 2007-05-19 12:22:33.123456789 2006-11-01 12:37:26.876543211 2006-11-01 12:37:26.876543211 2007-05-19 12:22:33.123456789
-2009-01-21 2008-10-13 13:37:26.876543211 2009-04-30 12:22:33.123456789 2009-04-30 12:22:33.123456789 2008-10-13 13:37:26.876543211 2008-10-13 13:37:26.876543211 2009-04-30 12:22:33.123456789
-2010-04-08 2009-12-29 11:37:26.876543211 2010-07-16 11:22:33.123456789 2010-07-16 11:22:33.123456789 2009-12-29 11:37:26.876543211 2009-12-29 11:37:26.876543211 2010-07-16 11:22:33.123456789
-2013-04-07 2012-12-28 11:37:26.876543211 2013-07-15 11:22:33.123456789 2013-07-15 11:22:33.123456789 2012-12-28 11:37:26.876543211 2012-12-28 11:37:26.876543211 2013-07-15 11:22:33.123456789
-2013-04-10 2012-12-31 11:37:26.876543211 2013-07-18 11:22:33.123456789 2013-07-18 11:22:33.123456789 2012-12-31 11:37:26.876543211 2012-12-31 11:37:26.876543211 2013-07-18 11:22:33.123456789
-2021-09-24 2021-06-16 12:37:26.876543211 2022-01-01 10:22:33.123456789 2022-01-01 10:22:33.123456789 2021-06-16 12:37:26.876543211 2021-06-16 12:37:26.876543211 2022-01-01 10:22:33.123456789
-2024-11-11 2024-08-03 13:37:26.876543211 2025-02-18 11:22:33.123456789 2025-02-18 11:22:33.123456789 2024-08-03 13:37:26.876543211 2024-08-03 13:37:26.876543211 2025-02-18 11:22:33.123456789
+1966-08-16 1966-05-08 12:37:26.876543211 1966-11-23 11:22:33.123456789 1966-11-23 11:22:33.123456789 1966-05-08 12:37:26.876543211 1966-05-08 12:37:26.876543211 1966-11-23 11:22:33.123456789
+1973-04-17 1973-01-07 12:37:26.876543211 1973-07-25 11:22:33.123456789 1973-07-25 11:22:33.123456789 1973-01-07 12:37:26.876543211 1973-01-07 12:37:26.876543211 1973-07-25 11:22:33.123456789
+1974-10-04 1974-06-26 12:37:26.876543211 1975-01-11 11:22:33.123456789 1975-01-11 11:22:33.123456789 1974-06-26 12:37:26.876543211 1974-06-26 12:37:26.876543211 1975-01-11 11:22:33.123456789
+1976-03-03 1975-11-24 12:37:26.876543211 1976-06-10 11:22:33.123456789 1976-06-10 11:22:33.123456789 1975-11-24 12:37:26.876543211 1975-11-24 12:37:26.876543211 1976-06-10 11:22:33.123456789
+1976-05-06 1976-01-27 12:37:26.876543211 1976-08-13 11:22:33.123456789 1976-08-13 11:22:33.123456789 1976-01-27 12:37:26.876543211 1976-01-27 12:37:26.876543211 1976-08-13 11:22:33.123456789
+1978-08-05 1978-04-27 12:37:26.876543211 1978-11-12 11:22:33.123456789 1978-11-12 11:22:33.123456789 1978-04-27 12:37:26.876543211 1978-04-27 12:37:26.876543211 1978-11-12 11:22:33.123456789
+1981-04-25 1981-01-15 12:37:26.876543211 1981-08-02 11:22:33.123456789 1981-08-02 11:22:33.123456789 1981-01-15 12:37:26.876543211 1981-01-15 12:37:26.876543211 1981-08-02 11:22:33.123456789
+1981-11-15 1981-08-07 12:37:26.876543211 1982-02-22 11:22:33.123456789 1982-02-22 11:22:33.123456789 1981-08-07 12:37:26.876543211 1981-08-07 12:37:26.876543211 1982-02-22 11:22:33.123456789
+1985-07-20 1985-04-11 12:37:26.876543211 1985-10-27 11:22:33.123456789 1985-10-27 11:22:33.123456789 1985-04-11 12:37:26.876543211 1985-04-11 12:37:26.876543211 1985-10-27 11:22:33.123456789
+1985-11-18 1985-08-10 12:37:26.876543211 1986-02-25 11:22:33.123456789 1986-02-25 11:22:33.123456789 1985-08-10 12:37:26.876543211 1985-08-10 12:37:26.876543211 1986-02-25 11:22:33.123456789
+1987-02-21 1986-11-13 12:37:26.876543211 1987-05-31 11:22:33.123456789 1987-05-31 11:22:33.123456789 1986-11-13 12:37:26.876543211 1986-11-13 12:37:26.876543211 1987-05-31 11:22:33.123456789
+1987-05-28 1987-02-17 12:37:26.876543211 1987-09-04 11:22:33.123456789 1987-09-04 11:22:33.123456789 1987-02-17 12:37:26.876543211 1987-02-17 12:37:26.876543211 1987-09-04 11:22:33.123456789
+1998-10-16 1998-07-08 12:37:26.876543211 1999-01-23 11:22:33.123456789 1999-01-23 11:22:33.123456789 1998-07-08 12:37:26.876543211 1998-07-08 12:37:26.876543211 1999-01-23 11:22:33.123456789
+1999-10-03 1999-06-25 12:37:26.876543211 2000-01-10 11:22:33.123456789 2000-01-10 11:22:33.123456789 1999-06-25 12:37:26.876543211 1999-06-25 12:37:26.876543211 2000-01-10 11:22:33.123456789
+2000-12-18 2000-09-09 12:37:26.876543211 2001-03-27 11:22:33.123456789 2001-03-27 11:22:33.123456789 2000-09-09 12:37:26.876543211 2000-09-09 12:37:26.876543211 2001-03-27 11:22:33.123456789
+2002-05-10 2002-01-30 12:37:26.876543211 2002-08-17 11:22:33.123456789 2002-08-17 11:22:33.123456789 2002-01-30 12:37:26.876543211 2002-01-30 12:37:26.876543211 2002-08-17 11:22:33.123456789
+2003-09-23 2003-06-15 12:37:26.876543211 2003-12-31 11:22:33.123456789 2003-12-31 11:22:33.123456789 2003-06-15 12:37:26.876543211 2003-06-15 12:37:26.876543211 2003-12-31 11:22:33.123456789
+2004-03-07 2003-11-28 12:37:26.876543211 2004-06-14 11:22:33.123456789 2004-06-14 11:22:33.123456789 2003-11-28 12:37:26.876543211 2003-11-28 12:37:26.876543211 2004-06-14 11:22:33.123456789
+2007-02-09 2006-11-01 12:37:26.876543211 2007-05-19 11:22:33.123456789 2007-05-19 11:22:33.123456789 2006-11-01 12:37:26.876543211 2006-11-01 12:37:26.876543211 2007-05-19 11:22:33.123456789
+2009-01-21 2008-10-13 12:37:26.876543211 2009-04-30 11:22:33.123456789 2009-04-30 11:22:33.123456789 2008-10-13 12:37:26.876543211 2008-10-13 12:37:26.876543211 2009-04-30 11:22:33.123456789
+2010-04-08 2009-12-29 12:37:26.876543211 2010-07-16 11:22:33.123456789 2010-07-16 11:22:33.123456789 2009-12-29 12:37:26.876543211 2009-12-29 12:37:26.876543211 2010-07-16 11:22:33.123456789
+2013-04-07 2012-12-28 12:37:26.876543211 2013-07-15 11:22:33.123456789 2013-07-15 11:22:33.123456789 2012-12-28 12:37:26.876543211 2012-12-28 12:37:26.876543211 2013-07-15 11:22:33.123456789
+2013-04-10 2012-12-31 12:37:26.876543211 2013-07-18 11:22:33.123456789 2013-07-18 11:22:33.123456789 2012-12-31 12:37:26.876543211 2012-12-31 12:37:26.876543211 2013-07-18 11:22:33.123456789
+2021-09-24 2021-06-16 12:37:26.876543211 2022-01-01 11:22:33.123456789 2022-01-01 11:22:33.123456789 2021-06-16 12:37:26.876543211 2021-06-16 12:37:26.876543211 2022-01-01 11:22:33.123456789
+2024-11-11 2024-08-03 12:37:26.876543211 2025-02-18 11:22:33.123456789 2025-02-18 11:22:33.123456789 2024-08-03 12:37:26.876543211 2024-08-03 12:37:26.876543211 2025-02-18 11:22:33.123456789
4143-07-08 4143-03-30 12:37:26.876543211 4143-10-15 11:22:33.123456789 4143-10-15 11:22:33.123456789 4143-03-30 12:37:26.876543211 4143-03-30 12:37:26.876543211 4143-10-15 11:22:33.123456789
-4966-12-04 4966-08-26 13:37:26.876543211 4967-03-13 12:22:33.123456789 4967-03-13 12:22:33.123456789 4966-08-26 13:37:26.876543211 4966-08-26 13:37:26.876543211 4967-03-13 12:22:33.123456789
-5339-02-01 5338-10-24 13:37:26.876543211 5339-05-11 12:22:33.123456789 5339-05-11 12:22:33.123456789 5338-10-24 13:37:26.876543211 5338-10-24 13:37:26.876543211 5339-05-11 12:22:33.123456789
-5344-10-04 5344-06-26 12:37:26.876543211 5345-01-11 10:22:33.123456789 5345-01-11 10:22:33.123456789 5344-06-26 12:37:26.876543211 5344-06-26 12:37:26.876543211 5345-01-11 10:22:33.123456789
+4966-12-04 4966-08-26 12:37:26.876543211 4967-03-13 11:22:33.123456789 4967-03-13 11:22:33.123456789 4966-08-26 12:37:26.876543211 4966-08-26 12:37:26.876543211 4967-03-13 11:22:33.123456789
+5339-02-01 5338-10-24 12:37:26.876543211 5339-05-11 11:22:33.123456789 5339-05-11 11:22:33.123456789 5338-10-24 12:37:26.876543211 5338-10-24 12:37:26.876543211 5339-05-11 11:22:33.123456789
+5344-10-04 5344-06-26 12:37:26.876543211 5345-01-11 11:22:33.123456789 5345-01-11 11:22:33.123456789 5344-06-26 12:37:26.876543211 5344-06-26 12:37:26.876543211 5345-01-11 11:22:33.123456789
5397-07-13 5397-04-04 12:37:26.876543211 5397-10-20 11:22:33.123456789 5397-10-20 11:22:33.123456789 5397-04-04 12:37:26.876543211 5397-04-04 12:37:26.876543211 5397-10-20 11:22:33.123456789
5966-07-09 5966-03-31 12:37:26.876543211 5966-10-16 11:22:33.123456789 5966-10-16 11:22:33.123456789 5966-03-31 12:37:26.876543211 5966-03-31 12:37:26.876543211 5966-10-16 11:22:33.123456789
6229-06-28 6229-03-20 12:37:26.876543211 6229-10-05 11:22:33.123456789 6229-10-05 11:22:33.123456789 6229-03-20 12:37:26.876543211 6229-03-20 12:37:26.876543211 6229-10-05 11:22:33.123456789
-6482-04-27 6482-01-17 11:37:26.876543211 6482-08-04 11:22:33.123456789 6482-08-04 11:22:33.123456789 6482-01-17 11:37:26.876543211 6482-01-17 11:37:26.876543211 6482-08-04 11:22:33.123456789
-6631-11-13 6631-08-05 13:37:26.876543211 6632-02-20 11:22:33.123456789 6632-02-20 11:22:33.123456789 6631-08-05 13:37:26.876543211 6631-08-05 13:37:26.876543211 6632-02-20 11:22:33.123456789
-6705-09-28 6705-06-20 12:37:26.876543211 6706-01-05 10:22:33.123456789 6706-01-05 10:22:33.123456789 6705-06-20 12:37:26.876543211 6705-06-20 12:37:26.876543211 6706-01-05 10:22:33.123456789
-6731-02-12 6730-11-04 12:37:26.876543211 6731-05-22 12:22:33.123456789 6731-05-22 12:22:33.123456789 6730-11-04 12:37:26.876543211 6730-11-04 12:37:26.876543211 6731-05-22 12:22:33.123456789
-7160-12-02 7160-08-24 13:37:26.876543211 7161-03-11 11:22:33.123456789 7161-03-11 11:22:33.123456789 7160-08-24 13:37:26.876543211 7160-08-24 13:37:26.876543211 7161-03-11 11:22:33.123456789
-7409-09-07 7409-05-30 12:37:26.876543211 7409-12-15 10:22:33.123456789 7409-12-15 10:22:33.123456789 7409-05-30 12:37:26.876543211 7409-05-30 12:37:26.876543211 7409-12-15 10:22:33.123456789
+6482-04-27 6482-01-17 12:37:26.876543211 6482-08-04 11:22:33.123456789 6482-08-04 11:22:33.123456789 6482-01-17 12:37:26.876543211 6482-01-17 12:37:26.876543211 6482-08-04 11:22:33.123456789
+6631-11-13 6631-08-05 12:37:26.876543211 6632-02-20 11:22:33.123456789 6632-02-20 11:22:33.123456789 6631-08-05 12:37:26.876543211 6631-08-05 12:37:26.876543211 6632-02-20 11:22:33.123456789
+6705-09-28 6705-06-20 12:37:26.876543211 6706-01-05 11:22:33.123456789 6706-01-05 11:22:33.123456789 6705-06-20 12:37:26.876543211 6705-06-20 12:37:26.876543211 6706-01-05 11:22:33.123456789
+6731-02-12 6730-11-04 12:37:26.876543211 6731-05-22 11:22:33.123456789 6731-05-22 11:22:33.123456789 6730-11-04 12:37:26.876543211 6730-11-04 12:37:26.876543211 6731-05-22 11:22:33.123456789
+7160-12-02 7160-08-24 12:37:26.876543211 7161-03-11 11:22:33.123456789 7161-03-11 11:22:33.123456789 7160-08-24 12:37:26.876543211 7160-08-24 12:37:26.876543211 7161-03-11 11:22:33.123456789
+7409-09-07 7409-05-30 12:37:26.876543211 7409-12-15 11:22:33.123456789 7409-12-15 11:22:33.123456789 7409-05-30 12:37:26.876543211 7409-05-30 12:37:26.876543211 7409-12-15 11:22:33.123456789
7503-06-23 7503-03-15 12:37:26.876543211 7503-09-30 11:22:33.123456789 7503-09-30 11:22:33.123456789 7503-03-15 12:37:26.876543211 7503-03-15 12:37:26.876543211 7503-09-30 11:22:33.123456789
8422-07-22 8422-04-13 12:37:26.876543211 8422-10-29 11:22:33.123456789 8422-10-29 11:22:33.123456789 8422-04-13 12:37:26.876543211 8422-04-13 12:37:26.876543211 8422-10-29 11:22:33.123456789
-8521-01-16 8520-10-08 13:37:26.876543211 8521-04-25 12:22:33.123456789 8521-04-25 12:22:33.123456789 8520-10-08 13:37:26.876543211 8520-10-08 13:37:26.876543211 8521-04-25 12:22:33.123456789
-9075-06-13 9075-03-05 11:37:26.876543211 9075-09-20 11:22:33.123456789 9075-09-20 11:22:33.123456789 9075-03-05 11:37:26.876543211 9075-03-05 11:37:26.876543211 9075-09-20 11:22:33.123456789
-9209-11-11 9209-08-03 13:37:26.876543211 9210-02-18 11:22:33.123456789 9210-02-18 11:22:33.123456789 9209-08-03 13:37:26.876543211 9209-08-03 13:37:26.876543211 9210-02-18 11:22:33.123456789
-9403-01-09 9402-10-01 13:37:26.876543211 9403-04-18 12:22:33.123456789 9403-04-18 12:22:33.123456789 9402-10-01 13:37:26.876543211 9402-10-01 13:37:26.876543211 9403-04-18 12:22:33.123456789
+8521-01-16 8520-10-08 12:37:26.876543211 8521-04-25 11:22:33.123456789 8521-04-25 11:22:33.123456789 8520-10-08 12:37:26.876543211 8520-10-08 12:37:26.876543211 8521-04-25 11:22:33.123456789
+9075-06-13 9075-03-05 12:37:26.876543211 9075-09-20 11:22:33.123456789 9075-09-20 11:22:33.123456789 9075-03-05 12:37:26.876543211 9075-03-05 12:37:26.876543211 9075-09-20 11:22:33.123456789
+9209-11-11 9209-08-03 12:37:26.876543211 9210-02-18 11:22:33.123456789 9210-02-18 11:22:33.123456789 9209-08-03 12:37:26.876543211 9209-08-03 12:37:26.876543211 9210-02-18 11:22:33.123456789
+9403-01-09 9402-10-01 12:37:26.876543211 9403-04-18 11:22:33.123456789 9403-04-18 11:22:33.123456789 9402-10-01 12:37:26.876543211 9402-10-01 12:37:26.876543211 9403-04-18 11:22:33.123456789
PREHOOK: query: explain vectorization expression
select
dateval,
@@ -1094,50 +1094,50 @@ tsval _c1 _c2 _c3 _c4 _c5 _c6
1404-07-23 15:32:16.059185026 1404-04-15 04:09:42.935728237 1404-10-31 02:54:49.182641815 1404-10-31 02:54:49.182641815 1404-04-15 04:09:42.935728237 1404-04-15 04:09:42.935728237 1404-10-31 02:54:49.182641815
1815-05-06 00:12:37.543584705 1815-01-26 12:50:04.420127916 1815-08-13 11:35:10.667041494 1815-08-13 11:35:10.667041494 1815-01-26 12:50:04.420127916 1815-01-26 12:50:04.420127916 1815-08-13 11:35:10.667041494
1883-04-17 04:14:34.647766229 1883-01-07 16:52:01.52430944 1883-07-25 15:37:07.771223018 1883-07-25 15:37:07.771223018 1883-01-07 16:52:01.52430944 1883-01-07 16:52:01.52430944 1883-07-25 15:37:07.771223018
-1966-08-16 13:36:50.183618031 1966-05-09 02:14:17.060161242 1966-11-23 23:59:23.30707482 1966-11-23 23:59:23.30707482 1966-05-09 02:14:17.060161242 1966-05-09 02:14:17.060161242 1966-11-23 23:59:23.30707482
-1973-04-17 06:30:38.596784156 1973-01-07 19:08:05.473327367 1973-07-25 18:53:11.720240945 1973-07-25 18:53:11.720240945 1973-01-07 19:08:05.473327367 1973-01-07 19:08:05.473327367 1973-07-25 18:53:11.720240945
-1974-10-04 17:21:03.989 1974-06-27 05:58:30.865543211 1975-01-12 03:43:37.112456789 1975-01-12 03:43:37.112456789 1974-06-27 05:58:30.865543211 1974-06-27 05:58:30.865543211 1975-01-12 03:43:37.112456789
-1976-03-03 04:54:33.000895162 1975-11-24 17:31:59.877438373 1976-06-10 17:17:06.124351951 1976-06-10 17:17:06.124351951 1975-11-24 17:31:59.877438373 1975-11-24 17:31:59.877438373 1976-06-10 17:17:06.124351951
-1976-05-06 00:42:30.910786948 1976-01-27 12:19:57.787330159 1976-08-13 12:05:04.034243737 1976-08-13 12:05:04.034243737 1976-01-27 12:19:57.787330159 1976-01-27 12:19:57.787330159 1976-08-13 12:05:04.034243737
-1978-08-05 14:41:05.501 1978-04-28 02:18:32.377543211 1978-11-13 01:03:38.624456789 1978-11-13 01:03:38.624456789 1978-04-28 02:18:32.377543211 1978-04-28 02:18:32.377543211 1978-11-13 01:03:38.624456789
-1981-04-25 09:01:12.077192689 1981-01-15 21:38:38.9537359 1981-08-02 21:23:45.200649478 1981-08-02 21:23:45.200649478 1981-01-15 21:38:38.9537359 1981-01-15 21:38:38.9537359 1981-08-02 21:23:45.200649478
-1981-11-15 23:03:10.999338387 1981-08-08 12:40:37.875881598 1982-02-23 10:25:44.122795176 1982-02-23 10:25:44.122795176 1981-08-08 12:40:37.875881598 1981-08-08 12:40:37.875881598 1982-02-23 10:25:44.122795176
-1985-07-20 09:30:11 1985-04-11 21:07:37.876543211 1985-10-27 19:52:44.123456789 1985-10-27 19:52:44.123456789 1985-04-11 21:07:37.876543211 1985-04-11 21:07:37.876543211 1985-10-27 19:52:44.123456789
-1985-11-18 16:37:54 1985-08-11 06:15:20.876543211 1986-02-26 04:00:27.123456789 1986-02-26 04:00:27.123456789 1985-08-11 06:15:20.876543211 1985-08-11 06:15:20.876543211 1986-02-26 04:00:27.123456789
-1987-02-21 19:48:29 1986-11-14 08:25:55.876543211 1987-06-01 08:11:02.123456789 1987-06-01 08:11:02.123456789 1986-11-14 08:25:55.876543211 1986-11-14 08:25:55.876543211 1987-06-01 08:11:02.123456789
-1987-05-28 13:52:07.900916635 1987-02-18 01:29:34.777459846 1987-09-05 01:14:41.024373424 1987-09-05 01:14:41.024373424 1987-02-18 01:29:34.777459846 1987-02-18 01:29:34.777459846 1987-09-05 01:14:41.024373424
-1998-10-16 20:05:29.397591987 1998-07-09 08:42:56.274135198 1999-01-24 06:28:02.521048776 1999-01-24 06:28:02.521048776 1998-07-09 08:42:56.274135198 1998-07-09 08:42:56.274135198 1999-01-24 06:28:02.521048776
-1999-10-03 16:59:10.396903939 1999-06-26 05:36:37.27344715 2000-01-11 03:21:43.520360728 2000-01-11 03:21:43.520360728 1999-06-26 05:36:37.27344715 1999-06-26 05:36:37.27344715 2000-01-11 03:21:43.520360728
-2000-12-18 08:42:30.000595596 2000-09-09 22:19:56.877138807 2001-03-27 20:05:03.124052385 2001-03-27 20:05:03.124052385 2000-09-09 22:19:56.877138807 2000-09-09 22:19:56.877138807 2001-03-27 20:05:03.124052385
-2002-05-10 05:29:48.990818073 2002-01-30 17:07:15.867361284 2002-08-17 16:52:22.114274862 2002-08-17 16:52:22.114274862 2002-01-30 17:07:15.867361284 2002-01-30 17:07:15.867361284 2002-08-17 16:52:22.114274862
-2003-09-23 22:33:17.00003252 2003-06-16 11:10:43.876575731 2004-01-01 08:55:50.123489309 2004-01-01 08:55:50.123489309 2003-06-16 11:10:43.876575731 2003-06-16 11:10:43.876575731 2004-01-01 08:55:50.123489309
-2004-03-07 20:14:13 2003-11-29 08:51:39.876543211 2004-06-15 08:36:46.123456789 2004-06-15 08:36:46.123456789 2003-11-29 08:51:39.876543211 2003-11-29 08:51:39.876543211 2004-06-15 08:36:46.123456789
-2007-02-09 05:17:29.368756876 2006-11-01 17:54:56.245300087 2007-05-19 17:40:02.492213665 2007-05-19 17:40:02.492213665 2006-11-01 17:54:56.245300087 2006-11-01 17:54:56.245300087 2007-05-19 17:40:02.492213665
-2009-01-21 10:49:07.108 2008-10-14 00:26:33.984543211 2009-04-30 23:11:40.231456789 2009-04-30 23:11:40.231456789 2008-10-14 00:26:33.984543211 2008-10-14 00:26:33.984543211 2009-04-30 23:11:40.231456789
-2010-04-08 02:43:35.861742727 2009-12-29 14:21:02.738285938 2010-07-16 14:06:08.985199516 2010-07-16 14:06:08.985199516 2009-12-29 14:21:02.738285938 2009-12-29 14:21:02.738285938 2010-07-16 14:06:08.985199516
-2013-04-07 02:44:43.00086821 2012-12-28 14:22:09.877411421 2013-07-15 14:07:16.124324999 2013-07-15 14:07:16.124324999 2012-12-28 14:22:09.877411421 2012-12-28 14:22:09.877411421 2013-07-15 14:07:16.124324999
-2013-04-10 00:43:46.854731546 2012-12-31 12:21:13.731274757 2013-07-18 12:06:19.978188335 2013-07-18 12:06:19.978188335 2012-12-31 12:21:13.731274757 2012-12-31 12:21:13.731274757 2013-07-18 12:06:19.978188335
-2021-09-24 03:18:32.413655165 2021-06-16 15:55:59.290198376 2022-01-01 13:41:05.537111954 2022-01-01 13:41:05.537111954 2021-06-16 15:55:59.290198376 2021-06-16 15:55:59.290198376 2022-01-01 13:41:05.537111954
-2024-11-11 16:42:41.101 2024-08-04 06:20:07.977543211 2025-02-19 04:05:14.224456789 2025-02-19 04:05:14.224456789 2024-08-04 06:20:07.977543211 2024-08-04 06:20:07.977543211 2025-02-19 04:05:14.224456789
+1966-08-16 13:36:50.183618031 1966-05-09 02:14:17.060161242 1966-11-24 00:59:23.30707482 1966-11-24 00:59:23.30707482 1966-05-09 02:14:17.060161242 1966-05-09 02:14:17.060161242 1966-11-24 00:59:23.30707482
+1973-04-17 06:30:38.596784156 1973-01-07 19:08:05.473327367 1973-07-25 17:53:11.720240945 1973-07-25 17:53:11.720240945 1973-01-07 19:08:05.473327367 1973-01-07 19:08:05.473327367 1973-07-25 17:53:11.720240945
+1974-10-04 17:21:03.989 1974-06-27 05:58:30.865543211 1975-01-12 04:43:37.112456789 1975-01-12 04:43:37.112456789 1974-06-27 05:58:30.865543211 1974-06-27 05:58:30.865543211 1975-01-12 04:43:37.112456789
+1976-03-03 04:54:33.000895162 1975-11-24 17:31:59.877438373 1976-06-10 16:17:06.124351951 1976-06-10 16:17:06.124351951 1975-11-24 17:31:59.877438373 1975-11-24 17:31:59.877438373 1976-06-10 16:17:06.124351951
+1976-05-06 00:42:30.910786948 1976-01-27 13:19:57.787330159 1976-08-13 12:05:04.034243737 1976-08-13 12:05:04.034243737 1976-01-27 13:19:57.787330159 1976-01-27 13:19:57.787330159 1976-08-13 12:05:04.034243737
+1978-08-05 14:41:05.501 1978-04-28 03:18:32.377543211 1978-11-13 02:03:38.624456789 1978-11-13 02:03:38.624456789 1978-04-28 03:18:32.377543211 1978-04-28 03:18:32.377543211 1978-11-13 02:03:38.624456789
+1981-04-25 09:01:12.077192689 1981-01-15 21:38:38.9537359 1981-08-02 20:23:45.200649478 1981-08-02 20:23:45.200649478 1981-01-15 21:38:38.9537359 1981-01-15 21:38:38.9537359 1981-08-02 20:23:45.200649478
+1981-11-15 23:03:10.999338387 1981-08-08 11:40:37.875881598 1982-02-23 10:25:44.122795176 1982-02-23 10:25:44.122795176 1981-08-08 11:40:37.875881598 1981-08-08 11:40:37.875881598 1982-02-23 10:25:44.122795176
+1985-07-20 09:30:11 1985-04-11 22:07:37.876543211 1985-10-27 20:52:44.123456789 1985-10-27 20:52:44.123456789 1985-04-11 22:07:37.876543211 1985-04-11 22:07:37.876543211 1985-10-27 20:52:44.123456789
+1985-11-18 16:37:54 1985-08-11 05:15:20.876543211 1986-02-26 04:00:27.123456789 1986-02-26 04:00:27.123456789 1985-08-11 05:15:20.876543211 1985-08-11 05:15:20.876543211 1986-02-26 04:00:27.123456789
+1987-02-21 19:48:29 1986-11-14 08:25:55.876543211 1987-06-01 07:11:02.123456789 1987-06-01 07:11:02.123456789 1986-11-14 08:25:55.876543211 1986-11-14 08:25:55.876543211 1987-06-01 07:11:02.123456789
+1987-05-28 13:52:07.900916635 1987-02-18 02:29:34.777459846 1987-09-05 01:14:41.024373424 1987-09-05 01:14:41.024373424 1987-02-18 02:29:34.777459846 1987-02-18 02:29:34.777459846 1987-09-05 01:14:41.024373424
+1998-10-16 20:05:29.397591987 1998-07-09 08:42:56.274135198 1999-01-24 07:28:02.521048776 1999-01-24 07:28:02.521048776 1998-07-09 08:42:56.274135198 1998-07-09 08:42:56.274135198 1999-01-24 07:28:02.521048776
+1999-10-03 16:59:10.396903939 1999-06-26 05:36:37.27344715 2000-01-11 04:21:43.520360728 2000-01-11 04:21:43.520360728 1999-06-26 05:36:37.27344715 1999-06-26 05:36:37.27344715 2000-01-11 04:21:43.520360728
+2000-12-18 08:42:30.000595596 2000-09-09 21:19:56.877138807 2001-03-27 20:05:03.124052385 2001-03-27 20:05:03.124052385 2000-09-09 21:19:56.877138807 2000-09-09 21:19:56.877138807 2001-03-27 20:05:03.124052385
+2002-05-10 05:29:48.990818073 2002-01-30 18:07:15.867361284 2002-08-17 16:52:22.114274862 2002-08-17 16:52:22.114274862 2002-01-30 18:07:15.867361284 2002-01-30 18:07:15.867361284 2002-08-17 16:52:22.114274862
+2003-09-23 22:33:17.00003252 2003-06-16 11:10:43.876575731 2004-01-01 09:55:50.123489309 2004-01-01 09:55:50.123489309 2003-06-16 11:10:43.876575731 2003-06-16 11:10:43.876575731 2004-01-01 09:55:50.123489309
+2004-03-07 20:14:13 2003-11-29 08:51:39.876543211 2004-06-15 07:36:46.123456789 2004-06-15 07:36:46.123456789 2003-11-29 08:51:39.876543211 2003-11-29 08:51:39.876543211 2004-06-15 07:36:46.123456789
+2007-02-09 05:17:29.368756876 2006-11-01 17:54:56.245300087 2007-05-19 16:40:02.492213665 2007-05-19 16:40:02.492213665 2006-11-01 17:54:56.245300087 2006-11-01 17:54:56.245300087 2007-05-19 16:40:02.492213665
+2009-01-21 10:49:07.108 2008-10-13 23:26:33.984543211 2009-04-30 22:11:40.231456789 2009-04-30 22:11:40.231456789 2008-10-13 23:26:33.984543211 2008-10-13 23:26:33.984543211 2009-04-30 22:11:40.231456789
+2010-04-08 02:43:35.861742727 2009-12-29 15:21:02.738285938 2010-07-16 14:06:08.985199516 2010-07-16 14:06:08.985199516 2009-12-29 15:21:02.738285938 2009-12-29 15:21:02.738285938 2010-07-16 14:06:08.985199516
+2013-04-07 02:44:43.00086821 2012-12-28 15:22:09.877411421 2013-07-15 14:07:16.124324999 2013-07-15 14:07:16.124324999 2012-12-28 15:22:09.877411421 2012-12-28 15:22:09.877411421 2013-07-15 14:07:16.124324999
+2013-04-10 00:43:46.854731546 2012-12-31 13:21:13.731274757 2013-07-18 12:06:19.978188335 2013-07-18 12:06:19.978188335 2012-12-31 13:21:13.731274757 2012-12-31 13:21:13.731274757 2013-07-18 12:06:19.978188335
+2021-09-24 03:18:32.413655165 2021-06-16 15:55:59.290198376 2022-01-01 14:41:05.537111954 2022-01-01 14:41:05.537111954 2021-06-16 15:55:59.290198376 2021-06-16 15:55:59.290198376 2022-01-01 14:41:05.537111954
+2024-11-11 16:42:41.101 2024-08-04 05:20:07.977543211 2025-02-19 04:05:14.224456789 2025-02-19 04:05:14.224456789 2024-08-04 05:20:07.977543211 2024-08-04 05:20:07.977543211 2025-02-19 04:05:14.224456789
4143-07-08 10:53:27.252802259 4143-03-30 23:30:54.12934547 4143-10-15 22:16:00.376259048 4143-10-15 22:16:00.376259048 4143-03-30 23:30:54.12934547 4143-03-30 23:30:54.12934547 4143-10-15 22:16:00.376259048
-4966-12-04 09:30:55.202 4966-08-26 23:08:22.078543211 4967-03-13 21:53:28.325456789 4967-03-13 21:53:28.325456789 4966-08-26 23:08:22.078543211 4966-08-26 23:08:22.078543211 4967-03-13 21:53:28.325456789
-5339-02-01 14:10:01.085678691 5338-10-25 03:47:27.962221902 5339-05-12 02:32:34.20913548 5339-05-12 02:32:34.20913548 5338-10-25 03:47:27.962221902 5338-10-25 03:47:27.962221902 5339-05-12 02:32:34.20913548
-5344-10-04 18:40:08.165 5344-06-27 07:17:35.041543211 5345-01-12 05:02:41.288456789 5345-01-12 05:02:41.288456789 5344-06-27 07:17:35.041543211 5344-06-27 07:17:35.041543211 5345-01-12 05:02:41.288456789
+4966-12-04 09:30:55.202 4966-08-26 22:08:22.078543211 4967-03-13 20:53:28.325456789 4967-03-13 20:53:28.325456789 4966-08-26 22:08:22.078543211 4966-08-26 22:08:22.078543211 4967-03-13 20:53:28.325456789
+5339-02-01 14:10:01.085678691 5338-10-25 02:47:27.962221902 5339-05-12 01:32:34.20913548 5339-05-12 01:32:34.20913548 5338-10-25 02:47:27.962221902 5338-10-25 02:47:27.962221902 5339-05-12 01:32:34.20913548
+5344-10-04 18:40:08.165 5344-06-27 07:17:35.041543211 5345-01-12 06:02:41.288456789 5345-01-12 06:02:41.288456789 5344-06-27 07:17:35.041543211 5344-06-27 07:17:35.041543211 5345-01-12 06:02:41.288456789
5397-07-13 07:12:32.000896438 5397-04-04 19:49:58.877439649 5397-10-20 18:35:05.124353227 5397-10-20 18:35:05.124353227 5397-04-04 19:49:58.877439649 5397-04-04 19:49:58.877439649 5397-10-20 18:35:05.124353227
5966-07-09 03:30:50.597 5966-03-31 16:08:17.473543211 5966-10-16 14:53:23.720456789 5966-10-16 14:53:23.720456789 5966-03-31 16:08:17.473543211 5966-03-31 16:08:17.473543211 5966-10-16 14:53:23.720456789
6229-06-28 02:54:28.970117179 6229-03-20 15:31:55.84666039 6229-10-05 14:17:02.093573968 6229-10-05 14:17:02.093573968 6229-03-20 15:31:55.84666039 6229-03-20 15:31:55.84666039 6229-10-05 14:17:02.093573968
-6482-04-27 12:07:38.073915413 6482-01-17 23:45:04.950458624 6482-08-04 23:30:11.197372202 6482-08-04 23:30:11.197372202 6482-01-17 23:45:04.950458624 6482-01-17 23:45:04.950458624 6482-08-04 23:30:11.197372202
-6631-11-13 16:31:29.702202248 6631-08-06 06:08:56.578745459 6632-02-21 03:54:02.825659037 6632-02-21 03:54:02.825659037 6631-08-06 06:08:56.578745459 6631-08-06 06:08:56.578745459 6632-02-21 03:54:02.825659037
-6705-09-28 18:27:28.000845672 6705-06-21 07:04:54.877388883 6706-01-06 04:50:01.124302461 6706-01-06 04:50:01.124302461 6705-06-21 07:04:54.877388883 6705-06-21 07:04:54.877388883 6706-01-06 04:50:01.124302461
-6731-02-12 08:12:48.287783702 6730-11-04 20:50:15.164326913 6731-05-22 20:35:21.411240491 6731-05-22 20:35:21.411240491 6730-11-04 20:50:15.164326913 6730-11-04 20:50:15.164326913 6731-05-22 20:35:21.411240491
-7160-12-02 06:00:24.81200852 7160-08-24 19:37:51.688551731 7161-03-11 17:22:57.935465309 7161-03-11 17:22:57.935465309 7160-08-24 19:37:51.688551731 7160-08-24 19:37:51.688551731 7161-03-11 17:22:57.935465309
-7409-09-07 23:33:32.459349602 7409-05-31 12:10:59.335892813 7409-12-16 09:56:05.582806391 7409-12-16 09:56:05.582806391 7409-05-31 12:10:59.335892813 7409-05-31 12:10:59.335892813 7409-12-16 09:56:05.582806391
+6482-04-27 12:07:38.073915413 6482-01-18 00:45:04.950458624 6482-08-04 23:30:11.197372202 6482-08-04 23:30:11.197372202 6482-01-18 00:45:04.950458624 6482-01-18 00:45:04.950458624 6482-08-04 23:30:11.197372202
+6631-11-13 16:31:29.702202248 6631-08-06 05:08:56.578745459 6632-02-21 03:54:02.825659037 6632-02-21 03:54:02.825659037 6631-08-06 05:08:56.578745459 6631-08-06 05:08:56.578745459 6632-02-21 03:54:02.825659037
+6705-09-28 18:27:28.000845672 6705-06-21 07:04:54.877388883 6706-01-06 05:50:01.124302461 6706-01-06 05:50:01.124302461 6705-06-21 07:04:54.877388883 6705-06-21 07:04:54.877388883 6706-01-06 05:50:01.124302461
+6731-02-12 08:12:48.287783702 6730-11-04 20:50:15.164326913 6731-05-22 19:35:21.411240491 6731-05-22 19:35:21.411240491 6730-11-04 20:50:15.164326913 6730-11-04 20:50:15.164326913 6731-05-22 19:35:21.411240491
+7160-12-02 06:00:24.81200852 7160-08-24 18:37:51.688551731 7161-03-11 17:22:57.935465309 7161-03-11 17:22:57.935465309 7160-08-24 18:37:51.688551731 7160-08-24 18:37:51.688551731 7161-03-11 17:22:57.935465309
+7409-09-07 23:33:32.459349602 7409-05-31 12:10:59.335892813 7409-12-16 10:56:05.582806391 7409-12-16 10:56:05.582806391 7409-05-31 12:10:59.335892813 7409-05-31 12:10:59.335892813 7409-12-16 10:56:05.582806391
7503-06-23 23:14:17.486 7503-03-16 11:51:44.362543211 7503-10-01 10:36:50.609456789 7503-10-01 10:36:50.609456789 7503-03-16 11:51:44.362543211 7503-03-16 11:51:44.362543211 7503-10-01 10:36:50.609456789
8422-07-22 03:21:45.745036084 8422-04-13 15:59:12.621579295 8422-10-29 14:44:18.868492873 8422-10-29 14:44:18.868492873 8422-04-13 15:59:12.621579295 8422-04-13 15:59:12.621579295 8422-10-29 14:44:18.868492873
-8521-01-16 20:42:05.668832388 8520-10-09 10:19:32.545375599 8521-04-26 09:04:38.792289177 8521-04-26 09:04:38.792289177 8520-10-09 10:19:32.545375599 8520-10-09 10:19:32.545375599 8521-04-26 09:04:38.792289177
-9075-06-13 16:20:09.218517797 9075-03-06 03:57:36.095061008 9075-09-21 03:42:42.341974586 9075-09-21 03:42:42.341974586 9075-03-06 03:57:36.095061008 9075-03-06 03:57:36.095061008 9075-09-21 03:42:42.341974586
-9209-11-11 04:08:58.223768453 9209-08-03 17:46:25.100311664 9210-02-18 15:31:31.347225242 9210-02-18 15:31:31.347225242 9209-08-03 17:46:25.100311664 9209-08-03 17:46:25.100311664 9210-02-18 15:31:31.347225242
-9403-01-09 18:12:33.547 9402-10-02 07:50:00.423543211 9403-04-19 06:35:06.670456789 9403-04-19 06:35:06.670456789 9402-10-02 07:50:00.423543211 9402-10-02 07:50:00.423543211 9403-04-19 06:35:06.670456789
+8521-01-16 20:42:05.668832388 8520-10-09 09:19:32.545375599 8521-04-26 08:04:38.792289177 8521-04-26 08:04:38.792289177 8520-10-09 09:19:32.545375599 8520-10-09 09:19:32.545375599 8521-04-26 08:04:38.792289177
+9075-06-13 16:20:09.218517797 9075-03-06 04:57:36.095061008 9075-09-21 03:42:42.341974586 9075-09-21 03:42:42.341974586 9075-03-06 04:57:36.095061008 9075-03-06 04:57:36.095061008 9075-09-21 03:42:42.341974586
+9209-11-11 04:08:58.223768453 9209-08-03 16:46:25.100311664 9210-02-18 15:31:31.347225242 9210-02-18 15:31:31.347225242 9209-08-03 16:46:25.100311664 9209-08-03 16:46:25.100311664 9210-02-18 15:31:31.347225242
+9403-01-09 18:12:33.547 9402-10-02 06:50:00.423543211 9403-04-19 05:35:06.670456789 9403-04-19 05:35:06.670456789 9402-10-02 06:50:00.423543211 9402-10-02 06:50:00.423543211 9403-04-19 05:35:06.670456789
PREHOOK: query: explain vectorization expression
select
interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second,
diff --git a/ql/src/test/results/clientpositive/vectorization_13.q.out b/ql/src/test/results/clientpositive/vectorization_13.q.out
index 1cf64a3f95..250683dbd5 100644
--- a/ql/src/test/results/clientpositive/vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_13.q.out
@@ -24,8 +24,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -57,8 +57,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -87,8 +87,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
- predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and (UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+ predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28789.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28788.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+ predicate: (((UDFToDouble(ctimestamp1) > -28789.0D) and (UDFToDouble(ctimestamp2) <> -28788.0D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -246,8 +246,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -280,8 +280,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > 11)
- AND ((ctimestamp2 != 12)
+ OR ((ctimestamp1 > -28789)
+ AND ((ctimestamp2 != -28788)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -355,8 +355,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -388,8 +388,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -417,8 +417,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -1.3359999999999999)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
- predicate: (((UDFToDouble(ctimestamp1) > -1.388D) and (UDFToDouble(ctimestamp2) <> -1.3359999999999999D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
+ predicateExpression: FilterExprOrExpr(children: FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 3569.0), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 10.175), FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28801.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterDoubleColNotEqualDoubleScalar(col 13:double, val -28801.336)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4))))
+ predicate: (((UDFToDouble(ctimestamp1) > -28801.388D) and (UDFToDouble(ctimestamp2) <> -28801.336D) and (CAST( ctinyint AS decimal(11,4)) < 9763215.5639)) or ((cfloat < 3569) and (cdouble <= 10.175D) and (cboolean1 <> 1))) (type: boolean)
Statistics: Num rows: 2730 Data size: 646063 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cfloat (type: float), cstring1 (type: string), UDFToDouble(cfloat) (type: double), (UDFToDouble(cfloat) * UDFToDouble(cfloat)) (type: double), UDFToDouble(ctinyint) (type: double), (UDFToDouble(ctinyint) * UDFToDouble(ctinyint)) (type: double)
@@ -563,8 +563,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
@@ -597,8 +597,8 @@ FROM alltypesorc
WHERE (((cfloat < 3569)
AND ((10.175 >= cdouble)
AND (cboolean1 != 1)))
- OR ((ctimestamp1 > -1.388)
- AND ((ctimestamp2 != -1.3359999999999999)
+ OR ((ctimestamp1 > -28801.388)
+ AND ((ctimestamp2 != -28801.3359999999999999)
AND (ctinyint < 9763215.5639))))
GROUP BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1
ORDER BY cboolean1, ctinyint, ctimestamp1, cfloat, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15, c16
diff --git a/ql/src/test/results/clientpositive/vectorization_7.q.out b/ql/src/test/results/clientpositive/vectorization_7.q.out
index 3999bf25e8..933bb71f9e 100644
--- a/ql/src/test/results/clientpositive/vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_7.q.out
@@ -16,11 +16,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -43,11 +43,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -74,8 +74,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
- predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -15.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+ predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28815.0)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+ predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28815.0D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
Statistics: Num rows: 5461 Data size: 1292362 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -155,11 +155,11 @@ PREHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -183,11 +183,11 @@ POSTHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0)
+ AND (((ctimestamp1 <= -28800)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > -15)
+ OR ((ctimestamp2 > -28815)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -237,11 +237,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -264,11 +264,11 @@ SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -294,8 +294,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val 7.6850000000000005)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
- predicate: (((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > 7.6850000000000005D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
+ predicateExpression: FilterExprAndExpr(children: FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, val -28800.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), FilterStringColLikeStringScalar(col 7:string, pattern ss)), FilterExprOrExpr(children: FilterDoubleColGreaterDoubleScalar(col 5:double, val 988888.0), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, val -28792.315)(children: CastTimestampToDouble(col 9:timestamp) -> 13:double), FilterDoubleColLessEqualDoubleScalar(col 5:double, val 3569.0))))
+ predicate: (((UDFToDouble(ctimestamp1) <= -28800.0D) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((cdouble > 988888.0D) or ((UDFToDouble(ctimestamp2) > -28792.315D) and (cdouble <= 3569.0D))) and (ctinyint <> 0Y)) (type: boolean)
Statistics: Num rows: 5461 Data size: 1292362 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cboolean1 (type: boolean), cbigint (type: bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 (type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), (UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), (- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), (cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- ctinyint) % ctinyint) (type: tinyint)
@@ -369,11 +369,11 @@ PREHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
@@ -397,11 +397,11 @@ POSTHOOK: query: SELECT cboolean1,
((-(ctinyint)) % ctinyint) as c9
FROM alltypesorc
WHERE ((ctinyint != 0)
- AND (((ctimestamp1 <= 0.0)
+ AND (((ctimestamp1 <= -28800.0)
OR ((ctinyint = cint)
OR (cstring2 LIKE 'ss')))
AND ((988888 < cdouble)
- OR ((ctimestamp2 > 7.6850000000000005)
+ OR ((ctimestamp2 > -28792.3149999999999995)
AND (3569 >= cdouble)))))
ORDER BY cboolean1, cbigint, csmallint, ctinyint, ctimestamp1, cstring1, c1, c2, c3, c4, c5, c6, c7, c8, c9
LIMIT 25
diff --git a/ql/src/test/results/clientpositive/vectorized_casts.q.out b/ql/src/test/results/clientpositive/vectorized_casts.q.out
index c79d8d7c7c..bf57518c2a 100644
--- a/ql/src/test/results/clientpositive/vectorized_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_casts.q.out
@@ -180,7 +180,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [13, 14, 15, 16, 17, 18, 10, 20, 19, 21, 0, 1, 2, 3, 22, 23, 10, 24, 25, 27, 28, 29, 30, 31, 32, 33, 34, 4, 5, 35, 36, 37, 38, 39, 5, 41, 43, 45, 47, 48, 49, 51, 54, 55, 8, 56, 57, 26, 58, 59, 60, 61, 62, 63, 64, 65, 6, 67, 68, 69, 70, 66, 73]
- selectExpressions: CastLongToBooleanViaLongToLong(col 0:tinyint) -> 13:boolean, CastLongToBooleanViaLongToLong(col 1:smallint) -> 14:boolean, CastLongToBooleanViaLongToLong(col 2:int) -> 15:boolean, CastLongToBooleanViaLongToLong(col 3:bigint) -> 16:boolean, CastDoubleToBooleanViaDoubleToLong(col 4:float) -> 17:boolean, CastDoubleToBooleanViaDoubleToLong(col 5:double) -> 18:boolean, CastLongToBooleanViaLongToLong(col 19:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 19:bigint) -> 20:boolean, CastTimestampToBoolean(col 8:timestamp) -> 19:boolean, CastStringToBoolean(col 6) -> 21:boolean, CastDoubleToLong(col 4:float) -> 22:int, CastDoubleToLong(col 5:double) -> 23:int, CastTimestampToLong(col 8:timestamp) -> 24:int, CastStringToLong(col 6:string) -> 25:int, CastStringToLong(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 27:int, CastDoubleToLong(col 4:float) -> 28:tinyint, CastDoubleToLong(col 4:float) -> 29:smallint, CastDoubleToLong(col 4:float) -> 30:bigint, CastLongToDouble(col 0:tinyint) -> 31:double, CastLongToDouble(col 1:smallint) -> 32:double, CastLongToDouble(col 2:int) -> 33:double, CastLongToDouble(col 3:bigint) -> 34:double, CastLongToDouble(col 10:boolean) -> 35:double, CastTimestampToDouble(col 8:timestamp) -> 36:double, CastStringToDouble(col 6:string) -> 37:double, CastStringToDouble(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 38:double, CastLongToFloatViaLongToDouble(col 2:int) -> 39:float, CastMillisecondsLongToTimestamp(col 0:tinyint) -> 41:timestamp, CastMillisecondsLongToTimestamp(col 1:smallint) -> 43:timestamp, CastMillisecondsLongToTimestamp(col 2:int) -> 45:timestamp, CastMillisecondsLongToTimestamp(col 3:bigint) -> 47:timestamp, CastDoubleToTimestamp(col 4:float) -> 48:timestamp, CastDoubleToTimestamp(col 5:double) -> 49:timestamp, CastMillisecondsLongToTimestamp(col 10:boolean) -> 51:timestamp, CastMillisecondsLongToTimestamp(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 54:timestamp, CastDateToTimestamp(col 52:date)(children: CastTimestampToDate(col 8:timestamp) -> 52:date) -> 55:timestamp, VectorUDFAdaptor(CAST( cstring1 AS TIMESTAMP)) -> 56:timestamp, VectorUDFAdaptor(CAST( substr(cstring1, 1, 1) AS TIMESTAMP))(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 57:timestamp, CastLongToString(col 0:tinyint) -> 26:string, CastLongToString(col 1:smallint) -> 58:string, CastLongToString(col 2:int) -> 59:string, CastLongToString(col 3:bigint) -> 60:string, CastFloatToString(col 4:float) -> 61:string, CastDoubleToString(col 5:double) -> 62:string, CastBooleanToStringViaLongToString(col 10:boolean) -> 63:string, CastLongToString(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 64:string, VectorUDFAdaptor(UDFToString(ctimestamp1)) -> 65:string, CastStringGroupToString(col 66:char(10))(children: CastStringGroupToChar(col 6:string, maxLength 10) -> 66:char(10)) -> 67:string, CastStringGroupToString(col 66:varchar(10))(children: CastStringGroupToVarChar(col 6:string, maxLength 10) -> 66:varchar(10)) -> 68:string, CastLongToFloatViaLongToDouble(col 52:int)(children: CastDoubleToLong(col 4:float) -> 52:int) -> 69:float, CastLongToDouble(col 52:int)(children: LongColMultiplyLongScalar(col 2:int, val 2) -> 52:int) -> 70:double, CastDoubleToString(col 71:double)(children: FuncSinDoubleToDouble(col 4:float) -> 71:double) -> 66:string, DoubleColAddDoubleColumn(col 71:double, col 72:double)(children: col 71:float, CastLongToDouble(col 10:boolean) -> 72:double) -> 73:double
+ selectExpressions: CastLongToBooleanViaLongToLong(col 0:tinyint) -> 13:boolean, CastLongToBooleanViaLongToLong(col 1:smallint) -> 14:boolean, CastLongToBooleanViaLongToLong(col 2:int) -> 15:boolean, CastLongToBooleanViaLongToLong(col 3:bigint) -> 16:boolean, CastDoubleToBooleanViaDoubleToLong(col 4:float) -> 17:boolean, CastDoubleToBooleanViaDoubleToLong(col 5:double) -> 18:boolean, CastLongToBooleanViaLongToLong(col 19:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 19:bigint) -> 20:boolean, CastTimestampToBoolean(col 8:timestamp) -> 19:boolean, CastStringToBoolean(col 6) -> 21:boolean, CastDoubleToLong(col 4:float) -> 22:int, CastDoubleToLong(col 5:double) -> 23:int, CastTimestampToLong(col 8:timestamp) -> 24:int, CastStringToLong(col 6:string) -> 25:int, CastStringToLong(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 27:int, CastDoubleToLong(col 4:float) -> 28:tinyint, CastDoubleToLong(col 4:float) -> 29:smallint, CastDoubleToLong(col 4:float) -> 30:bigint, CastLongToDouble(col 0:tinyint) -> 31:double, CastLongToDouble(col 1:smallint) -> 32:double, CastLongToDouble(col 2:int) -> 33:double, CastLongToDouble(col 3:bigint) -> 34:double, CastLongToDouble(col 10:boolean) -> 35:double, CastTimestampToDouble(col 8:timestamp) -> 36:double, CastStringToDouble(col 6:string) -> 37:double, CastStringToDouble(col 26:string)(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 38:double, CastLongToFloatViaLongToDouble(col 2:int) -> 39:float, CastMillisecondsLongToTimestamp(col 0:tinyint) -> 41:timestamp, CastMillisecondsLongToTimestamp(col 1:smallint) -> 43:timestamp, CastMillisecondsLongToTimestamp(col 2:int) -> 45:timestamp, CastMillisecondsLongToTimestamp(col 3:bigint) -> 47:timestamp, CastDoubleToTimestamp(col 4:float) -> 48:timestamp, CastDoubleToTimestamp(col 5:double) -> 49:timestamp, CastMillisecondsLongToTimestamp(col 10:boolean) -> 51:timestamp, CastMillisecondsLongToTimestamp(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 54:timestamp, CastDateToTimestamp(col 52:date)(children: CastTimestampToDate(col 8:timestamp) -> 52:date) -> 55:timestamp, VectorUDFAdaptor(CAST( cstring1 AS TIMESTAMP)) -> 56:timestamp, VectorUDFAdaptor(CAST( substr(cstring1, 1, 1) AS TIMESTAMP))(children: StringSubstrColStartLen(col 6:string, start 0, length 1) -> 26:string) -> 57:timestamp, CastLongToString(col 0:tinyint) -> 26:string, CastLongToString(col 1:smallint) -> 58:string, CastLongToString(col 2:int) -> 59:string, CastLongToString(col 3:bigint) -> 60:string, CastFloatToString(col 4:float) -> 61:string, CastDoubleToString(col 5:double) -> 62:string, CastBooleanToStringViaLongToString(col 10:boolean) -> 63:string, CastLongToString(col 52:bigint)(children: LongColMultiplyLongScalar(col 3:bigint, val 0) -> 52:bigint) -> 64:string, CastTimestampToString(col 8:timestamp) -> 65:string, CastStringGroupToString(col 66:char(10))(children: CastStringGroupToChar(col 6:string, maxLength 10) -> 66:char(10)) -> 67:string, CastStringGroupToString(col 66:varchar(10))(children: CastStringGroupToVarChar(col 6:string, maxLength 10) -> 66:varchar(10)) -> 68:string, CastLongToFloatViaLongToDouble(col 52:int)(children: CastDoubleToLong(col 4:float) -> 52:int) -> 69:float, CastLongToDouble(col 52:int)(children: LongColMultiplyLongScalar(col 2:int, val 2) -> 52:int) -> 70:double, CastDoubleToString(col 71:double)(children: FuncSinDoubleToDouble(col 4:float) -> 71:double) -> 66:string, DoubleColAddDoubleColumn(col 71:double, col 72:double)(children: col 71:float, CastLongToDouble(col 10:boolean) -> 72:double) -> 73:double
Statistics: Num rows: 6144 Data size: 1453997 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -367,29 +367,29 @@ where cbigint % 250 = 0
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-true NULL true true true NULL false false true true -51 NULL 773600971 1053923250 -51 NULL 0 8 NULL 2 -51 -51 -51 -51.0 NULL 7.73600971E8 1.05392325E9 -51.0 NULL 0.0 8.451 NULL 2.0 7.7360096E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 773600971 1053923250 -51.0 NULL FALSE 0 1969-12-31 16:00:08.451 2yK4Bx76O 2yK4Bx76O 2yK4Bx76O -51.0 1.547201942E9 -0.6702291758433747 7.7360096E8
-true NULL true true true NULL false false true true 8 NULL -102936434 -1312782750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -1.02936434E8 -1.31278275E9 8.0 NULL 0.0 15.892 NULL NULL -1.02936432E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -102936434 -1312782750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 eJROSNhugc3kQR7Pb eJROSNhugc eJROSNhugc 8.0 -2.05872868E8 0.9893582466233818 -1.02936432E8
-true NULL true true true NULL false false true true 8 NULL -661621138 -931392750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -6.61621138E8 -9.3139275E8 8.0 NULL 0.0 15.892 NULL NULL -6.6162112E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -661621138 -931392750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 L15l8i5k558tBcDV20 L15l8i5k55 L15l8i5k55 8.0 -1.323242276E9 0.9893582466233818 -6.6162112E8
-true NULL true true true NULL false false true true 8 NULL -669632311 1588591250 8 NULL 0 15 NULL 3 8 8 8 8.0 NULL -6.69632311E8 1.58859125E9 8.0 NULL 0.0 15.892 NULL 3.0 -6.6963232E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -669632311 1588591250 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 3r3sDvfUkG0yTP3LnX5mNQRr 3r3sDvfUkG 3r3sDvfUkG 8.0 -1.339264622E9 0.9893582466233818 -6.6963232E8
-true NULL true true true NULL false false true true 8 NULL 805179664 868161500 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL 8.05179664E8 8.681615E8 8.0 NULL 0.0 15.892 NULL NULL 8.0517965E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 805179664 868161500 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 e005B5q e005B5q e005B5q 8.0 1.610359328E9 0.9893582466233818 8.05179648E8
-true NULL true true true NULL true false true true -51 NULL 747553882 -1930467250 -51 NULL 1 8 NULL NULL -51 -51 -51 -51.0 NULL 7.47553882E8 -1.93046725E9 -51.0 NULL 1.0 8.451 NULL NULL 7.4755386E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 747553882 -1930467250 -51.0 NULL TRUE 0 1969-12-31 16:00:08.451 q8M86Fx0r q8M86Fx0r q8M86Fx0r -51.0 1.495107764E9 -0.6702291758433747 7.47553857E8
-true NULL true true true NULL true false true true 11 NULL -335450417 1233327000 11 NULL 1 2 NULL NULL 11 11 11 11.0 NULL -3.35450417E8 1.233327E9 11.0 NULL 1.0 2.351 NULL NULL -3.35450432E8 NULL 1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -335450417 1233327000 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 dOYnqgaXoJ1P3ERwxe5N7 dOYnqgaXoJ dOYnqgaXoJ 11.0 -6.70900834E8 -0.9999902065507035 -3.35450431E8
-true NULL true true true NULL true false true true 11 NULL -64615982 1803053750 11 NULL 1 2 NULL 8 11 11 11 11.0 NULL -6.4615982E7 1.80305375E9 11.0 NULL 1.0 2.351 NULL 8.0 -6.4615984E7 NULL 1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -64615982 1803053750 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 8J5OB7K26PEV7kdbeHr3 8J5OB7K26P 8J5OB7K26P 11.0 -1.29231964E8 -0.9999902065507035 -6.4615983E7
-true NULL true true true NULL true false true true 8 NULL 890988972 -1862301000 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 8.90988972E8 -1.862301E9 8.0 NULL 1.0 15.892 NULL NULL 8.9098899E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 890988972 -1862301000 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 XylAH4 XylAH4 XylAH4 8.0 1.781977944E9 0.9893582466233818 8.90988993E8
-true NULL true true true NULL true false true true 8 NULL 930867246 1205399250 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 9.30867246E8 1.20539925E9 8.0 NULL 1.0 15.892 NULL NULL 9.3086726E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 930867246 1205399250 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 c1V8o1A c1V8o1A c1V8o1A 8.0 1.861734492E9 0.9893582466233818 9.30867265E8
-true true NULL true true true NULL false true NULL -14 -7196 NULL -1552199500 -14 -7196 NULL 11 NULL NULL -14 -14 -14 -14.0 -7196.0 NULL -1.5521995E9 -14.0 -7196.0 NULL 11.065 NULL NULL NULL -7196.0 1969-12-31 15:59:59.986 1969-12-31 15:59:52.804 NULL 1969-12-13 16:50:00.5 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:11.065 NULL NULL -14 -7196 NULL -1552199500 -14.0 -7196.0 NULL 0 1969-12-31 16:00:11.065 NULL NULL NULL -14.0 NULL -0.9906073556948704 NULL
-true true NULL true true true NULL false true NULL -21 -7196 NULL 1542429000 -21 -7196 NULL -5 NULL NULL -21 -21 -21 -21.0 -7196.0 NULL 1.542429E9 -21.0 -7196.0 NULL -4.1 NULL NULL NULL -7196.0 1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:55.9 NULL NULL -21 -7196 NULL 1542429000 -21.0 -7196.0 NULL 0 1969-12-31 15:59:55.9 NULL NULL NULL -21.0 NULL -0.8366556385360561 NULL
-true true NULL true true true NULL false true NULL -24 -7196 NULL 829111000 -24 -7196 NULL -7 NULL NULL -24 -24 -24 -24.0 -7196.0 NULL 8.29111E8 -24.0 -7196.0 NULL -6.855 NULL NULL NULL -7196.0 1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:53.145 NULL NULL -24 -7196 NULL 829111000 -24.0 -7196.0 NULL 0 1969-12-31 15:59:53.145 NULL NULL NULL -24.0 NULL 0.9055783620066238 NULL
-true true NULL true true true NULL false true NULL -30 -200 NULL 1429852250 -30 -200 NULL 12 NULL NULL -30 -30 -30 -30.0 -200.0 NULL 1.42985225E9 -30.0 -200.0 NULL 12.935 NULL NULL NULL -200.0 1969-12-31 15:59:59.97 1969-12-31 15:59:59.8 NULL 1970-01-17 05:10:52.25 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:12.935 NULL NULL -30 -200 NULL 1429852250 -30.0 -200.0 NULL 0 1969-12-31 16:00:12.935 NULL NULL NULL -30.0 NULL 0.9880316240928618 NULL
-true true NULL true true true NULL false true NULL -36 -200 NULL -2006216750 -36 -200 NULL -15 NULL NULL -36 -36 -36 -36.0 -200.0 NULL -2.00621675E9 -36.0 -200.0 NULL -14.252 NULL NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1969-12-08 10:43:03.25 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:45.748 NULL NULL -36 -200 NULL -2006216750 -36.0 -200.0 NULL 0 1969-12-31 15:59:45.748 NULL NULL NULL -36.0 NULL 0.9917788534431158 NULL
-true true NULL true true true NULL false true NULL -36 -200 NULL 1599879000 -36 -200 NULL -7 NULL NULL -36 -36 -36 -36.0 -200.0 NULL 1.599879E9 -36.0 -200.0 NULL -6.183 NULL NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1970-01-19 04:24:39 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:53.817 NULL NULL -36 -200 NULL 1599879000 -36.0 -200.0 NULL 0 1969-12-31 15:59:53.817 NULL NULL NULL -36.0 NULL 0.9917788534431158 NULL
-true true NULL true true true NULL false true NULL -38 15601 NULL -1858689000 -38 15601 NULL -2 NULL NULL -38 -38 -38 -38.0 15601.0 NULL -1.858689E9 -38.0 15601.0 NULL -1.3860000000000001 NULL NULL NULL 15601.0 1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:58.614 NULL NULL -38 15601 NULL -1858689000 -38.0 15601.0 NULL 0 1969-12-31 15:59:58.614 NULL NULL NULL -38.0 NULL -0.2963685787093853 NULL
-true true NULL true true true NULL false true NULL -5 15601 NULL 612416000 -5 15601 NULL 4 NULL NULL -5 -5 -5 -5.0 15601.0 NULL 6.12416E8 -5.0 15601.0 NULL 4.679 NULL NULL NULL 15601.0 1969-12-31 15:59:59.995 1969-12-31 16:00:15.601 NULL 1970-01-07 18:06:56 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:04.679 NULL NULL -5 15601 NULL 612416000 -5.0 15601.0 NULL 0 1969-12-31 16:00:04.679 NULL NULL NULL -5.0 NULL 0.9589242746631385 NULL
-true true NULL true true true NULL false true NULL -50 -7196 NULL -1031187250 -50 -7196 NULL -6 NULL NULL -50 -50 -50 -50.0 -7196.0 NULL -1.03118725E9 -50.0 -7196.0 NULL -5.267 NULL NULL NULL -7196.0 1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:54.733 NULL NULL -50 -7196 NULL -1031187250 -50.0 -7196.0 NULL 0 1969-12-31 15:59:54.733 NULL NULL NULL -50.0 NULL 0.26237485370392877 NULL
-true true NULL true true true NULL false true NULL -59 -7196 NULL -1604890000 -59 -7196 NULL 13 NULL NULL -59 -59 -59 -59.0 -7196.0 NULL -1.60489E9 -59.0 -7196.0 NULL 13.15 NULL NULL NULL -7196.0 1969-12-31 15:59:59.941 1969-12-31 15:59:52.804 NULL 1969-12-13 02:11:50 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:13.15 NULL NULL -59 -7196 NULL -1604890000 -59.0 -7196.0 NULL 0 1969-12-31 16:00:13.15 NULL NULL NULL -59.0 NULL -0.6367380071391379 NULL
-true true NULL true true true NULL false true NULL -60 -7196 NULL 1516314750 -60 -7196 NULL -8 NULL NULL -60 -60 -60 -60.0 -7196.0 NULL 1.51631475E9 -60.0 -7196.0 NULL -7.592 NULL NULL NULL -7196.0 1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:52.408 NULL NULL -60 -7196 NULL 1516314750 -60.0 -7196.0 NULL 0 1969-12-31 15:59:52.408 NULL NULL NULL -60.0 NULL 0.3048106211022167 NULL
-true true NULL true true true NULL false true NULL -8 -7196 NULL -1849991500 -8 -7196 NULL 3 NULL NULL -8 -8 -8 -8.0 -7196.0 NULL -1.8499915E9 -8.0 -7196.0 NULL 3.136 NULL NULL NULL -7196.0 1969-12-31 15:59:59.992 1969-12-31 15:59:52.804 NULL 1969-12-10 06:06:48.5 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:03.136 NULL NULL -8 -7196 NULL -1849991500 -8.0 -7196.0 NULL 0 1969-12-31 16:00:03.136 NULL NULL NULL -8.0 NULL -0.9893582466233818 NULL
-true true NULL true true true NULL false true NULL 20 15601 NULL -362433250 20 15601 NULL -15 NULL NULL 20 20 20 20.0 15601.0 NULL -3.6243325E8 20.0 15601.0 NULL -14.871 NULL NULL NULL 15601.0 1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:45.129 NULL NULL 20 15601 NULL -362433250 20.0 15601.0 NULL 0 1969-12-31 15:59:45.129 NULL NULL NULL 20.0 NULL 0.9129452507276277 NULL
-true true NULL true true true NULL false true NULL 48 15601 NULL -795361000 48 15601 NULL -10 NULL NULL 48 48 48 48.0 15601.0 NULL -7.95361E8 48.0 15601.0 NULL -9.765 NULL NULL NULL 15601.0 1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 15:59:50.235 NULL NULL 48 15601 NULL -795361000 48.0 15601.0 NULL 0 1969-12-31 15:59:50.235 NULL NULL NULL 48.0 NULL -0.7682546613236668 NULL
-true true NULL true true true NULL false true NULL 5 -7196 NULL -1015607500 5 -7196 NULL 10 NULL NULL 5 5 5 5.0 -7196.0 NULL -1.0156075E9 5.0 -7196.0 NULL 10.973 NULL NULL NULL -7196.0 1969-12-31 16:00:00.005 1969-12-31 15:59:52.804 NULL 1969-12-19 21:53:12.5 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:10.973 NULL NULL 5 -7196 NULL -1015607500 5.0 -7196.0 NULL 0 1969-12-31 16:00:10.973 NULL NULL NULL 5.0 NULL -0.9589242746631385 NULL
-true true NULL true true true NULL false true NULL 59 -7196 NULL -1137754500 59 -7196 NULL 10 NULL NULL 59 59 59 59.0 -7196.0 NULL -1.1377545E9 59.0 -7196.0 NULL 10.956 NULL NULL NULL -7196.0 1969-12-31 16:00:00.059 1969-12-31 15:59:52.804 NULL 1969-12-18 11:57:25.5 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 00:00:00 1969-12-31 16:00:10.956 NULL NULL 59 -7196 NULL -1137754500 59.0 -7196.0 NULL 0 1969-12-31 16:00:10.956 NULL NULL NULL 59.0 NULL 0.6367380071391379 NULL
+true NULL true true true NULL false false true true -51 NULL 773600971 1053923250 -51 NULL 0 -28792 NULL 2 -51 -51 -51 -51.0 NULL 7.73600971E8 1.05392325E9 -51.0 NULL 0.0 -28791.549 NULL 2.0 7.7360096E8 NULL 1969-12-31 23:59:59.949 NULL 1970-01-09 22:53:20.971 1970-01-13 04:45:23.25 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 773600971 1053923250 -51.0 NULL FALSE 0 1969-12-31 16:00:08.451 2yK4Bx76O 2yK4Bx76O 2yK4Bx76O -51.0 1.547201942E9 -0.6702291758433747 7.7360096E8
+true NULL true true true NULL false false true true 8 NULL -102936434 -1312782750 8 NULL 0 -28785 NULL NULL 8 8 8 8.0 NULL -1.02936434E8 -1.31278275E9 8.0 NULL 0.0 -28784.108 NULL NULL -1.02936432E8 NULL 1970-01-01 00:00:00.008 NULL 1969-12-30 19:24:23.566 1969-12-16 19:20:17.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -102936434 -1312782750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 eJROSNhugc3kQR7Pb eJROSNhugc eJROSNhugc 8.0 -2.05872868E8 0.9893582466233818 -1.02936432E8
+true NULL true true true NULL false false true true 8 NULL -661621138 -931392750 8 NULL 0 -28785 NULL NULL 8 8 8 8.0 NULL -6.61621138E8 -9.3139275E8 8.0 NULL 0.0 -28784.108 NULL NULL -6.6162112E8 NULL 1970-01-01 00:00:00.008 NULL 1969-12-24 08:12:58.862 1969-12-21 05:16:47.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -661621138 -931392750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 L15l8i5k558tBcDV20 L15l8i5k55 L15l8i5k55 8.0 -1.323242276E9 0.9893582466233818 -6.6162112E8
+true NULL true true true NULL false false true true 8 NULL -669632311 1588591250 8 NULL 0 -28785 NULL 3 8 8 8 8.0 NULL -6.69632311E8 1.58859125E9 8.0 NULL 0.0 -28784.108 NULL 3.0 -6.6963232E8 NULL 1970-01-01 00:00:00.008 NULL 1969-12-24 05:59:27.689 1970-01-19 09:16:31.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -669632311 1588591250 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 3r3sDvfUkG0yTP3LnX5mNQRr 3r3sDvfUkG 3r3sDvfUkG 8.0 -1.339264622E9 0.9893582466233818 -6.6963232E8
+true NULL true true true NULL false false true true 8 NULL 805179664 868161500 8 NULL 0 -28785 NULL NULL 8 8 8 8.0 NULL 8.05179664E8 8.681615E8 8.0 NULL 0.0 -28784.108 NULL NULL 8.0517965E8 NULL 1970-01-01 00:00:00.008 NULL 1970-01-10 07:39:39.664 1970-01-11 01:09:21.5 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 805179664 868161500 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 e005B5q e005B5q e005B5q 8.0 1.610359328E9 0.9893582466233818 8.05179648E8
+true NULL true true true NULL true false true true -51 NULL 747553882 -1930467250 -51 NULL 1 -28792 NULL NULL -51 -51 -51 -51.0 NULL 7.47553882E8 -1.93046725E9 -51.0 NULL 1.0 -28791.549 NULL NULL 7.4755386E8 NULL 1969-12-31 23:59:59.949 NULL 1970-01-09 15:39:13.882 1969-12-09 15:45:32.75 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 747553882 -1930467250 -51.0 NULL TRUE 0 1969-12-31 16:00:08.451 q8M86Fx0r q8M86Fx0r q8M86Fx0r -51.0 1.495107764E9 -0.6702291758433747 7.47553857E8
+true NULL true true true NULL true false true true 11 NULL -335450417 1233327000 11 NULL 1 -28798 NULL NULL 11 11 11 11.0 NULL -3.35450417E8 1.233327E9 11.0 NULL 1.0 -28797.649 NULL NULL -3.35450432E8 NULL 1970-01-01 00:00:00.011 NULL 1969-12-28 02:49:09.583 1970-01-15 06:35:27 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -335450417 1233327000 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 dOYnqgaXoJ1P3ERwxe5N7 dOYnqgaXoJ dOYnqgaXoJ 11.0 -6.70900834E8 -0.9999902065507035 -3.35450431E8
+true NULL true true true NULL true false true true 11 NULL -64615982 1803053750 11 NULL 1 -28798 NULL 8 11 11 11 11.0 NULL -6.4615982E7 1.80305375E9 11.0 NULL 1.0 -28797.649 NULL 8.0 -6.4615984E7 NULL 1970-01-01 00:00:00.011 NULL 1969-12-31 06:03:04.018 1970-01-21 20:50:53.75 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -64615982 1803053750 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 8J5OB7K26PEV7kdbeHr3 8J5OB7K26P 8J5OB7K26P 11.0 -1.29231964E8 -0.9999902065507035 -6.4615983E7
+true NULL true true true NULL true false true true 8 NULL 890988972 -1862301000 8 NULL 1 -28785 NULL NULL 8 8 8 8.0 NULL 8.90988972E8 -1.862301E9 8.0 NULL 1.0 -28784.108 NULL NULL 8.9098899E8 NULL 1970-01-01 00:00:00.008 NULL 1970-01-11 07:29:48.972 1969-12-10 10:41:39 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 890988972 -1862301000 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 XylAH4 XylAH4 XylAH4 8.0 1.781977944E9 0.9893582466233818 8.90988993E8
+true NULL true true true NULL true false true true 8 NULL 930867246 1205399250 8 NULL 1 -28785 NULL NULL 8 8 8 8.0 NULL 9.30867246E8 1.20539925E9 8.0 NULL 1.0 -28784.108 NULL NULL 9.3086726E8 NULL 1970-01-01 00:00:00.008 NULL 1970-01-11 18:34:27.246 1970-01-14 22:49:59.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 930867246 1205399250 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 c1V8o1A c1V8o1A c1V8o1A 8.0 1.861734492E9 0.9893582466233818 9.30867265E8
+true true NULL true true true NULL false true NULL -14 -7196 NULL -1552199500 -14 -7196 NULL -28789 NULL NULL -14 -14 -14 -14.0 -7196.0 NULL -1.5521995E9 -14.0 -7196.0 NULL -28788.935 NULL NULL NULL -7196.0 1969-12-31 23:59:59.986 1969-12-31 23:59:52.804 NULL 1969-12-14 00:50:00.5 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:11.065 NULL NULL -14 -7196 NULL -1552199500 -14.0 -7196.0 NULL 0 1969-12-31 16:00:11.065 NULL NULL NULL -14.0 NULL -0.9906073556948704 NULL
+true true NULL true true true NULL false true NULL -21 -7196 NULL 1542429000 -21 -7196 NULL -28805 NULL NULL -21 -21 -21 -21.0 -7196.0 NULL 1.542429E9 -21.0 -7196.0 NULL -28804.1 NULL NULL NULL -7196.0 1969-12-31 23:59:59.979 1969-12-31 23:59:52.804 NULL 1970-01-18 20:27:09 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:55.9 NULL NULL -21 -7196 NULL 1542429000 -21.0 -7196.0 NULL 0 1969-12-31 15:59:55.9 NULL NULL NULL -21.0 NULL -0.8366556385360561 NULL
+true true NULL true true true NULL false true NULL -24 -7196 NULL 829111000 -24 -7196 NULL -28807 NULL NULL -24 -24 -24 -24.0 -7196.0 NULL 8.29111E8 -24.0 -7196.0 NULL -28806.855 NULL NULL NULL -7196.0 1969-12-31 23:59:59.976 1969-12-31 23:59:52.804 NULL 1970-01-10 14:18:31 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:53.145 NULL NULL -24 -7196 NULL 829111000 -24.0 -7196.0 NULL 0 1969-12-31 15:59:53.145 NULL NULL NULL -24.0 NULL 0.9055783620066238 NULL
+true true NULL true true true NULL false true NULL -30 -200 NULL 1429852250 -30 -200 NULL -28788 NULL NULL -30 -30 -30 -30.0 -200.0 NULL 1.42985225E9 -30.0 -200.0 NULL -28787.065 NULL NULL NULL -200.0 1969-12-31 23:59:59.97 1969-12-31 23:59:59.8 NULL 1970-01-17 13:10:52.25 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:12.935 NULL NULL -30 -200 NULL 1429852250 -30.0 -200.0 NULL 0 1969-12-31 16:00:12.935 NULL NULL NULL -30.0 NULL 0.9880316240928618 NULL
+true true NULL true true true NULL false true NULL -36 -200 NULL -2006216750 -36 -200 NULL -28815 NULL NULL -36 -36 -36 -36.0 -200.0 NULL -2.00621675E9 -36.0 -200.0 NULL -28814.252 NULL NULL NULL -200.0 1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1969-12-08 18:43:03.25 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:45.748 NULL NULL -36 -200 NULL -2006216750 -36.0 -200.0 NULL 0 1969-12-31 15:59:45.748 NULL NULL NULL -36.0 NULL 0.9917788534431158 NULL
+true true NULL true true true NULL false true NULL -36 -200 NULL 1599879000 -36 -200 NULL -28807 NULL NULL -36 -36 -36 -36.0 -200.0 NULL 1.599879E9 -36.0 -200.0 NULL -28806.183 NULL NULL NULL -200.0 1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1970-01-19 12:24:39 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:53.817 NULL NULL -36 -200 NULL 1599879000 -36.0 -200.0 NULL 0 1969-12-31 15:59:53.817 NULL NULL NULL -36.0 NULL 0.9917788534431158 NULL
+true true NULL true true true NULL false true NULL -38 15601 NULL -1858689000 -38 15601 NULL -28802 NULL NULL -38 -38 -38 -38.0 15601.0 NULL -1.858689E9 -38.0 15601.0 NULL -28801.386 NULL NULL NULL 15601.0 1969-12-31 23:59:59.962 1970-01-01 00:00:15.601 NULL 1969-12-10 11:41:51 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:58.614 NULL NULL -38 15601 NULL -1858689000 -38.0 15601.0 NULL 0 1969-12-31 15:59:58.614 NULL NULL NULL -38.0 NULL -0.2963685787093853 NULL
+true true NULL true true true NULL false true NULL -5 15601 NULL 612416000 -5 15601 NULL -28796 NULL NULL -5 -5 -5 -5.0 15601.0 NULL 6.12416E8 -5.0 15601.0 NULL -28795.321 NULL NULL NULL 15601.0 1969-12-31 23:59:59.995 1970-01-01 00:00:15.601 NULL 1970-01-08 02:06:56 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:04.679 NULL NULL -5 15601 NULL 612416000 -5.0 15601.0 NULL 0 1969-12-31 16:00:04.679 NULL NULL NULL -5.0 NULL 0.9589242746631385 NULL
+true true NULL true true true NULL false true NULL -50 -7196 NULL -1031187250 -50 -7196 NULL -28806 NULL NULL -50 -50 -50 -50.0 -7196.0 NULL -1.03118725E9 -50.0 -7196.0 NULL -28805.267 NULL NULL NULL -7196.0 1969-12-31 23:59:59.95 1969-12-31 23:59:52.804 NULL 1969-12-20 01:33:32.75 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:54.733 NULL NULL -50 -7196 NULL -1031187250 -50.0 -7196.0 NULL 0 1969-12-31 15:59:54.733 NULL NULL NULL -50.0 NULL 0.26237485370392877 NULL
+true true NULL true true true NULL false true NULL -59 -7196 NULL -1604890000 -59 -7196 NULL -28787 NULL NULL -59 -59 -59 -59.0 -7196.0 NULL -1.60489E9 -59.0 -7196.0 NULL -28786.85 NULL NULL NULL -7196.0 1969-12-31 23:59:59.941 1969-12-31 23:59:52.804 NULL 1969-12-13 10:11:50 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:13.15 NULL NULL -59 -7196 NULL -1604890000 -59.0 -7196.0 NULL 0 1969-12-31 16:00:13.15 NULL NULL NULL -59.0 NULL -0.6367380071391379 NULL
+true true NULL true true true NULL false true NULL -60 -7196 NULL 1516314750 -60 -7196 NULL -28808 NULL NULL -60 -60 -60 -60.0 -7196.0 NULL 1.51631475E9 -60.0 -7196.0 NULL -28807.592 NULL NULL NULL -7196.0 1969-12-31 23:59:59.94 1969-12-31 23:59:52.804 NULL 1970-01-18 13:11:54.75 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:52.408 NULL NULL -60 -7196 NULL 1516314750 -60.0 -7196.0 NULL 0 1969-12-31 15:59:52.408 NULL NULL NULL -60.0 NULL 0.3048106211022167 NULL
+true true NULL true true true NULL false true NULL -8 -7196 NULL -1849991500 -8 -7196 NULL -28797 NULL NULL -8 -8 -8 -8.0 -7196.0 NULL -1.8499915E9 -8.0 -7196.0 NULL -28796.864 NULL NULL NULL -7196.0 1969-12-31 23:59:59.992 1969-12-31 23:59:52.804 NULL 1969-12-10 14:06:48.5 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:03.136 NULL NULL -8 -7196 NULL -1849991500 -8.0 -7196.0 NULL 0 1969-12-31 16:00:03.136 NULL NULL NULL -8.0 NULL -0.9893582466233818 NULL
+true true NULL true true true NULL false true NULL 20 15601 NULL -362433250 20 15601 NULL -28815 NULL NULL 20 20 20 20.0 15601.0 NULL -3.6243325E8 20.0 15601.0 NULL -28814.871 NULL NULL NULL 15601.0 1970-01-01 00:00:00.02 1970-01-01 00:00:15.601 NULL 1969-12-27 19:19:26.75 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:45.129 NULL NULL 20 15601 NULL -362433250 20.0 15601.0 NULL 0 1969-12-31 15:59:45.129 NULL NULL NULL 20.0 NULL 0.9129452507276277 NULL
+true true NULL true true true NULL false true NULL 48 15601 NULL -795361000 48 15601 NULL -28810 NULL NULL 48 48 48 48.0 15601.0 NULL -7.95361E8 48.0 15601.0 NULL -28809.765 NULL NULL NULL 15601.0 1970-01-01 00:00:00.048 1970-01-01 00:00:15.601 NULL 1969-12-22 19:03:59 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 15:59:50.235 NULL NULL 48 15601 NULL -795361000 48.0 15601.0 NULL 0 1969-12-31 15:59:50.235 NULL NULL NULL 48.0 NULL -0.7682546613236668 NULL
+true true NULL true true true NULL false true NULL 5 -7196 NULL -1015607500 5 -7196 NULL -28790 NULL NULL 5 5 5 5.0 -7196.0 NULL -1.0156075E9 5.0 -7196.0 NULL -28789.027 NULL NULL NULL -7196.0 1970-01-01 00:00:00.005 1969-12-31 23:59:52.804 NULL 1969-12-20 05:53:12.5 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:10.973 NULL NULL 5 -7196 NULL -1015607500 5.0 -7196.0 NULL 0 1969-12-31 16:00:10.973 NULL NULL NULL 5.0 NULL -0.9589242746631385 NULL
+true true NULL true true true NULL false true NULL 59 -7196 NULL -1137754500 59 -7196 NULL -28790 NULL NULL 59 59 59 59.0 -7196.0 NULL -1.1377545E9 59.0 -7196.0 NULL -28789.044 NULL NULL NULL -7196.0 1970-01-01 00:00:00.059 1969-12-31 23:59:52.804 NULL 1969-12-18 19:57:25.5 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 00:00:00 1969-12-31 16:00:10.956 NULL NULL 59 -7196 NULL -1137754500 59.0 -7196.0 NULL 0 1969-12-31 16:00:10.956 NULL NULL NULL 59.0 NULL 0.6367380071391379 NULL
diff --git a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
index 688d0ed20a..6c2d8cce57 100644
--- a/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_date_funcs.q.out
@@ -262,7 +262,7 @@ STAGE PLANS:
TableScan Vectorization:
native: true
Select Operator
- expressions: fl_time (type: timestamp), to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), dayofmonth(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int), datediff(fl_time, DATE'2000-01-01') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00.0') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09.0') (type: int), datediff(fl_time, '2007-03-14') (type: int), datediff(fl_time, DATE'2007-03-14') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00.0') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59.0') (type: int)
+ expressions: fl_time (type: timestamp), to_unix_timestamp(fl_time) (type: bigint), year(fl_time) (type: int), month(fl_time) (type: int), day(fl_time) (type: int), day(fl_time) (type: int), dayofweek(fl_time) (type: int), weekofyear(fl_time) (type: int), CAST( fl_time AS DATE) (type: date), to_date(fl_time) (type: date), date_add(fl_time, 2) (type: date), date_sub(fl_time, 2) (type: date), datediff(fl_time, '2000-01-01') (type: int), datediff(fl_time, DATE'2000-01-01') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00') (type: int), datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09') (type: int), datediff(fl_time, '2007-03-14') (type: int), datediff(fl_time, DATE'2007-03-14') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00') (type: int), datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59') (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Select Vectorization:
className: VectorSelectOperator
@@ -348,143 +348,143 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@date_udf_flight_orc
#### A masked pattern was here ####
fl_time _c1 _c2 _c3 _c4 _c5 _c6 _c7 _c8 _c9 _c10 _c11 _c12 _c13 _c14 _c15 _c16 _c17 _c18 _c19
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 07:00:00 1287583200 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 07:00:00 1287669600 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 07:00:00 1287756000 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 07:00:00 1287842400 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 07:00:00 1287928800 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 07:00:00 1288015200 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 07:00:00 1288101600 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 07:00:00 1288188000 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 07:00:00 1288274400 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 07:00:00 1288360800 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 07:00:00 1288447200 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 07:00:00 1288533600 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 07:00:00 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 07:00:00 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 07:00:00 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 07:00:00 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 07:00:00 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 07:00:00 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 07:00:00 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 07:00:00 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 07:00:00 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 07:00:00 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 07:00:00 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 07:00:00 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT
fl_date,
to_unix_timestamp(fl_date),
@@ -550,7 +550,7 @@ STAGE PLANS:
TableScan Vectorization:
native: true
Select Operator
- expressions: fl_date (type: date), to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), dayofmonth(fl_date) (type: int), dayofweek(fl_date) (type: int), weekofyear(fl_date) (type: int), fl_date (type: date), to_date(fl_date) (type: date), date_add(fl_date, 2) (type: date), date_sub(fl_date, 2) (type: date), datediff(fl_date, '2000-01-01') (type: int), datediff(fl_date, DATE'2000-01-01') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00.0') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09.0') (type: int), datediff(fl_date, '2007-03-14') (type: int), datediff(fl_date, DATE'2007-03-14') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00.0') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59.0') (type: int)
+ expressions: fl_date (type: date), to_unix_timestamp(fl_date) (type: bigint), year(fl_date) (type: int), month(fl_date) (type: int), day(fl_date) (type: int), day(fl_date) (type: int), dayofweek(fl_date) (type: int), weekofyear(fl_date) (type: int), fl_date (type: date), to_date(fl_date) (type: date), date_add(fl_date, 2) (type: date), date_sub(fl_date, 2) (type: date), datediff(fl_date, '2000-01-01') (type: int), datediff(fl_date, DATE'2000-01-01') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00') (type: int), datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09') (type: int), datediff(fl_date, '2007-03-14') (type: int), datediff(fl_date, DATE'2007-03-14') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00') (type: int), datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59') (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Select Vectorization:
className: VectorSelectOperator
@@ -636,143 +636,143 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@date_udf_flight_orc
#### A masked pattern was here ####
fl_date _c1 _c2 _c3 _c4 _c5 _c6 _c7 _c8 _c9 _c10 _c11 _c12 _c13 _c14 _c15 _c16 _c17 _c18 _c19
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-20 1287558000 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-21 1287644400 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-22 1287730800 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-23 1287817200 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-24 1287903600 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-25 1287990000 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-26 1288076400 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-27 1288162800 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-28 1288249200 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-29 1288335600 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-30 1288422000 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
-2010-10-31 1288508400 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-20 1287532800 2010 10 20 20 4 42 2010-10-20 2010-10-20 2010-10-22 2010-10-18 3945 3945 3945 3945 1316 1316 1316 1316
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-21 1287619200 2010 10 21 21 5 42 2010-10-21 2010-10-21 2010-10-23 2010-10-19 3946 3946 3946 3946 1317 1317 1317 1317
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-22 1287705600 2010 10 22 22 6 42 2010-10-22 2010-10-22 2010-10-24 2010-10-20 3947 3947 3947 3947 1318 1318 1318 1318
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-23 1287792000 2010 10 23 23 7 42 2010-10-23 2010-10-23 2010-10-25 2010-10-21 3948 3948 3948 3948 1319 1319 1319 1319
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-24 1287878400 2010 10 24 24 1 42 2010-10-24 2010-10-24 2010-10-26 2010-10-22 3949 3949 3949 3949 1320 1320 1320 1320
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-25 1287964800 2010 10 25 25 2 43 2010-10-25 2010-10-25 2010-10-27 2010-10-23 3950 3950 3950 3950 1321 1321 1321 1321
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-26 1288051200 2010 10 26 26 3 43 2010-10-26 2010-10-26 2010-10-28 2010-10-24 3951 3951 3951 3951 1322 1322 1322 1322
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-27 1288137600 2010 10 27 27 4 43 2010-10-27 2010-10-27 2010-10-29 2010-10-25 3952 3952 3952 3952 1323 1323 1323 1323
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-28 1288224000 2010 10 28 28 5 43 2010-10-28 2010-10-28 2010-10-30 2010-10-26 3953 3953 3953 3953 1324 1324 1324 1324
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-29 1288310400 2010 10 29 29 6 43 2010-10-29 2010-10-29 2010-10-31 2010-10-27 3954 3954 3954 3954 1325 1325 1325 1325
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-30 1288396800 2010 10 30 30 7 43 2010-10-30 2010-10-30 2010-11-01 2010-10-28 3955 3955 3955 3955 1326 1326 1326 1326
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
+2010-10-31 1288483200 2010 10 31 31 1 43 2010-10-31 2010-10-31 2010-11-02 2010-10-29 3956 3956 3956 3956 1327 1327 1327 1327
PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION SELECT
fl_time,
fl_date,
@@ -842,7 +842,7 @@ STAGE PLANS:
TableScan Vectorization:
native: true
Select Operator
- expressions: fl_time (type: timestamp), fl_date (type: date), (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofmonth(fl_time) = dayofmonth(fl_date)) (type: boolean), (dayofweek(fl_time) = dayofweek(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = fl_date) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean), (datediff(fl_time, DATE'2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00.0') = datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00.0')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09.0') = datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09.0')) (type: boolean), (datediff(fl_time, '2007-03-14') = datediff(fl_date, '2007-03-14')) (type: boolean), (datediff(fl_time, DATE'2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00.0') = datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00.0')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59.0') = datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59.0')) (type: boolean), (datediff(fl_date, '2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_date, '2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean)
+ expressions: fl_time (type: timestamp), fl_date (type: date), (year(fl_time) = year(fl_date)) (type: boolean), (month(fl_time) = month(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (day(fl_time) = day(fl_date)) (type: boolean), (dayofweek(fl_time) = dayofweek(fl_date)) (type: boolean), (weekofyear(fl_time) = weekofyear(fl_date)) (type: boolean), (CAST( fl_time AS DATE) = fl_date) (type: boolean), (to_date(fl_time) = to_date(fl_date)) (type: boolean), (date_add(fl_time, 2) = date_add(fl_date, 2)) (type: boolean), (date_sub(fl_time, 2) = date_sub(fl_date, 2)) (type: boolean), (datediff(fl_time, '2000-01-01') = datediff(fl_date, '2000-01-01')) (type: boolean), (datediff(fl_time, DATE'2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 00:00:00') = datediff(fl_date, TIMESTAMP'2000-01-01 00:00:00')) (type: boolean), (datediff(fl_time, TIMESTAMP'2000-01-01 11:13:09') = datediff(fl_date, TIMESTAMP'2000-01-01 11:13:09')) (type: boolean), (datediff(fl_time, '2007-03-14') = datediff(fl_date, '2007-03-14')) (type: boolean), (datediff(fl_time, DATE'2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 00:00:00') = datediff(fl_date, TIMESTAMP'2007-03-14 00:00:00')) (type: boolean), (datediff(fl_time, TIMESTAMP'2007-03-14 08:21:59') = datediff(fl_date, TIMESTAMP'2007-03-14 08:21:59')) (type: boolean), (datediff(fl_date, '2000-01-01') = datediff(fl_date, DATE'2000-01-01')) (type: boolean), (datediff(fl_date, '2007-03-14') = datediff(fl_date, DATE'2007-03-14')) (type: boolean)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21
Select Vectorization:
className: VectorSelectOperator
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
index 7e03bf386b..8303372f57 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp.q.out
@@ -76,8 +76,8 @@ POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test_n2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_n2
#### A masked pattern was here ####
-0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652060 23:59:59.999999999
+0001-01-01 00:00:00	9999-12-31 23:59:59.999999999	3652058 23:59:59.999999999
 PREHOOK: query: SELECT ts FROM test_n2 WHERE ts IN (timestamp '0001-01-01 00:00:00.000000000', timestamp '0002-02-02 00:00:00.000000000')
PREHOOK: type: QUERY
PREHOOK: Input: default@test_n2
#### A masked pattern was here ####
@@ -201,7 +206,7 @@ POSTHOOK: query: SELECT MIN(ts), MAX(ts), MAX(ts) - MIN(ts) FROM test_n2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_n2
#### A masked pattern was here ####
-0001-01-01 00:00:00 9999-12-31 23:59:59.999999999 3652060 23:59:59.999999999
+0001-01-01 00:00:00 9999-12-31 23:59:59.999999999 3652058 23:59:59.999999999
PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
SELECT ts FROM test_n2 WHERE ts IN (timestamp '0001-01-01 00:00:00.000000000', timestamp '0002-02-02 00:00:00.000000000')
PREHOOK: type: QUERY
@@ -230,8 +235,8 @@ STAGE PLANS:
Filter Vectorization:
className: VectorFilterOperator
native: true
- predicateExpression: FilterTimestampColumnInList(col 0:timestamp, values [0001-01-01 00:00:00.0, 0002-02-02 00:00:00.0])
- predicate: (ts) IN (TIMESTAMP'0001-01-01 00:00:00.0', TIMESTAMP'0002-02-02 00:00:00.0') (type: boolean)
+ predicateExpression: FilterTimestampColumnInList(col 0:timestamp, values [0001-01-02 16:00:00.0, 0002-02-03 16:00:00.0])
+ predicate: (ts) IN (TIMESTAMP'0001-01-01 00:00:00', TIMESTAMP'0002-02-02 00:00:00') (type: boolean)
Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ts (type: timestamp)
@@ -388,7 +393,7 @@ POSTHOOK: query: SELECT AVG(ts), CAST(AVG(ts) AS TIMESTAMP) FROM test_n2
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_n2
#### A masked pattern was here ####
-9.56332944E10 5000-07-01 13:00:00
+9.5633352E10 5000-07-02 12:00:00
PREHOOK: query: EXPLAIN VECTORIZATION DETAIL
SELECT variance(ts), var_pop(ts), var_samp(ts), std(ts), stddev(ts), stddev_pop(ts), stddev_samp(ts) FROM test_n2
PREHOOK: type: QUERY
@@ -495,4 +500,4 @@ POSTHOOK: query: SELECT variance(ts), var_pop(ts), var_samp(ts), std(ts), stddev
POSTHOOK: type: QUERY
POSTHOOK: Input: default@test_n2
#### A masked pattern was here ####
-2.489106846793884E22 2.489106846793884E22 4.978213693587768E22 1.577690352E11 1.577690352E11 1.577690352E11 2.2311910930235822E11
+2.4891041205457024E22 2.4891041205457024E22 4.978208241091405E22 1.577689488E11 1.577689488E11 1.577689488E11 2.2311898711430646E11
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
index f12a2b66a5..d3d394697e 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_ints_casts.q.out
@@ -132,32 +132,32 @@ where cbigint % 250 = 0
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1969-12-08 10:43:03.25 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL
-1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1970-01-19 04:24:39 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL
-1969-12-31 15:59:59.97 1969-12-31 15:59:59.8 NULL 1970-01-17 05:10:52.25 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL
-1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL
-1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL
-1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL
-1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL
-1969-12-31 15:59:59.995 1969-12-31 16:00:15.601 NULL 1970-01-07 18:06:56 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL
-1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL
-1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 15:59:59.941 1969-12-31 15:59:52.804 NULL 1969-12-13 02:11:50 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL
-1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL
-1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL
-1969-12-31 15:59:59.986 1969-12-31 15:59:52.804 NULL 1969-12-13 16:50:00.5 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL
-1969-12-31 16:00:00.059 1969-12-31 15:59:52.804 NULL 1969-12-18 11:57:25.5 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL
-1969-12-31 15:59:59.992 1969-12-31 15:59:52.804 NULL 1969-12-10 06:06:48.5 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL
-1969-12-31 16:00:00.005 1969-12-31 15:59:52.804 NULL 1969-12-19 21:53:12.5 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL
-1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL
-1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL
-1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL
-1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL
+1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1969-12-08 18:43:03.25 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 15:59:45.748 NULL NULL
+1969-12-31 23:59:59.964 1969-12-31 23:59:59.8 NULL 1970-01-19 12:24:39 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 15:59:53.817 NULL NULL
+1969-12-31 23:59:59.97 1969-12-31 23:59:59.8 NULL 1970-01-17 13:10:52.25 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 16:00:12.935 NULL NULL
+1969-12-31 23:59:59.949 NULL 1970-01-09 22:53:20.971 1970-01-13 04:45:23.25 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:08.451 NULL NULL
+1969-12-31 23:59:59.949 NULL 1970-01-09 15:39:13.882 1969-12-09 15:45:32.75 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 16:00:08.451 NULL NULL
+1970-01-01 00:00:00.02 1970-01-01 00:00:15.601 NULL 1969-12-27 19:19:26.75 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 15:59:45.129 NULL NULL
+1969-12-31 23:59:59.962 1970-01-01 00:00:15.601 NULL 1969-12-10 11:41:51 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 15:59:58.614 NULL NULL
+1969-12-31 23:59:59.995 1970-01-01 00:00:15.601 NULL 1970-01-08 02:06:56 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 16:00:04.679 NULL NULL
+1970-01-01 00:00:00.048 1970-01-01 00:00:15.601 NULL 1969-12-22 19:03:59 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 15:59:50.235 NULL NULL
+1970-01-01 00:00:00.008 NULL 1969-12-24 08:12:58.862 1969-12-21 05:16:47.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:00.008 NULL 1969-12-30 19:24:23.566 1969-12-16 19:20:17.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:00.008 NULL 1970-01-10 07:39:39.664 1970-01-11 01:09:21.5 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:00.008 NULL 1969-12-24 05:59:27.689 1970-01-19 09:16:31.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:00.008 NULL 1970-01-11 07:29:48.972 1969-12-10 10:41:39 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:00.008 NULL 1970-01-11 18:34:27.246 1970-01-14 22:49:59.25 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1969-12-31 23:59:59.941 1969-12-31 23:59:52.804 NULL 1969-12-13 10:11:50 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:13.15 NULL NULL
+1969-12-31 23:59:59.979 1969-12-31 23:59:52.804 NULL 1970-01-18 20:27:09 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:55.9 NULL NULL
+1969-12-31 23:59:59.94 1969-12-31 23:59:52.804 NULL 1970-01-18 13:11:54.75 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:52.408 NULL NULL
+1969-12-31 23:59:59.986 1969-12-31 23:59:52.804 NULL 1969-12-14 00:50:00.5 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:11.065 NULL NULL
+1970-01-01 00:00:00.059 1969-12-31 23:59:52.804 NULL 1969-12-18 19:57:25.5 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:10.956 NULL NULL
+1969-12-31 23:59:59.992 1969-12-31 23:59:52.804 NULL 1969-12-10 14:06:48.5 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:03.136 NULL NULL
+1970-01-01 00:00:00.005 1969-12-31 23:59:52.804 NULL 1969-12-20 05:53:12.5 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:10.973 NULL NULL
+1969-12-31 23:59:59.976 1969-12-31 23:59:52.804 NULL 1970-01-10 14:18:31 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:53.145 NULL NULL
+1969-12-31 23:59:59.95 1969-12-31 23:59:52.804 NULL 1969-12-20 01:33:32.75 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:54.733 NULL NULL
+1970-01-01 00:00:00.011 NULL 1969-12-31 06:03:04.018 1970-01-21 20:50:53.75 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 16:00:02.351 NULL NULL
+1970-01-01 00:00:00.011 NULL 1969-12-28 02:49:09.583 1970-01-15 06:35:27 1970-01-01 00:00:11 NULL 1970-01-01 00:00:00.001 1970-01-01 00:00:00 1969-12-31 16:00:02.351 NULL NULL
PREHOOK: query: explain vectorization expression
select
@@ -292,29 +292,29 @@ where cbigint % 250 = 0
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1906-06-05 13:34:10 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL
-1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 2020-09-11 19:50:00 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL
-1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 2015-04-23 22:10:50 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL
-1969-12-31 15:59:09 NULL 1994-07-07 10:09:31 2003-05-25 21:27:30 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL
-1969-12-31 15:59:09 NULL 1993-09-08 22:51:22 1908-10-29 07:05:50 1969-12-31 15:59:09 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL
-1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1958-07-07 21:05:50 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL
-1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1911-02-07 01:30:00 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL
-1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1989-05-28 20:33:20 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL
-1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1944-10-18 03:23:20 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL
-1969-12-31 16:00:08 NULL 1949-01-13 00:21:02 1940-06-26 15:47:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:08 NULL 1966-09-27 07:32:46 1928-05-26 10:07:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:08 NULL 1995-07-07 22:01:04 1997-07-05 20:58:20 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:08 NULL 1948-10-12 08:01:29 2020-05-04 04:20:50 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:08 NULL 1998-03-27 00:56:12 1910-12-27 06:10:00 1969-12-31 16:00:08 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 16:00:08 NULL 1999-07-01 15:14:06 2008-03-13 02:07:30 1969-12-31 16:00:08 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL
-1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1919-02-22 13:13:20 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL
-1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 2018-11-16 20:30:00 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL
-1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 2018-01-18 14:32:30 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL
-1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1920-10-24 09:28:20 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL
-1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1933-12-12 05:05:00 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL
-1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1911-05-18 17:28:20 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL
-1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1937-10-25 22:48:20 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.973 NULL NULL
-1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1996-04-09 21:36:40 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL
-1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1937-04-28 15:05:50 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL
-1969-12-31 16:00:11 NULL 1967-12-14 19:06:58 2027-02-19 08:15:50 1969-12-31 16:00:11 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL
-1969-12-31 16:00:11 NULL 1959-05-16 04:19:43 2009-01-30 06:50:00 1969-12-31 16:00:11 NULL 1969-12-31 16:00:01 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL
+1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1906-06-05 21:34:10 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 15:59:45.748 NULL NULL
+1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 2020-09-12 02:50:00 1969-12-31 23:59:24 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 15:59:53.817 NULL NULL
+1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 2015-04-24 05:10:50 1969-12-31 23:59:30 1969-12-31 23:56:40 NULL 1970-01-01 00:00:00 1969-12-31 16:00:12.935 NULL NULL
+1969-12-31 23:59:09 NULL 1994-07-07 17:09:31 2003-05-26 04:27:30 1969-12-31 23:59:09 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:08.451 NULL NULL
+1969-12-31 23:59:09 NULL 1993-09-09 05:51:22 1908-10-29 15:05:50 1969-12-31 23:59:09 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1969-12-31 16:00:08.451 NULL NULL
+1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1958-07-08 04:05:50 1970-01-01 00:00:20 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 15:59:45.129 NULL NULL
+1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1911-02-07 09:30:00 1969-12-31 23:59:22 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 15:59:58.614 NULL NULL
+1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1989-05-29 03:33:20 1969-12-31 23:59:55 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 16:00:04.679 NULL NULL
+1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1944-10-18 10:23:20 1970-01-01 00:00:48 1970-01-01 04:20:01 NULL 1970-01-01 00:00:00 1969-12-31 15:59:50.235 NULL NULL
+1970-01-01 00:00:08 NULL 1949-01-13 08:21:02 1940-06-26 23:47:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:08 NULL 1966-09-27 14:32:46 1928-05-26 18:07:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:08 NULL 1995-07-08 05:01:04 1997-07-06 03:58:20 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:08 NULL 1948-10-12 15:01:29 2020-05-04 11:20:50 1970-01-01 00:00:08 NULL 1970-01-01 00:00:00 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:08 NULL 1998-03-27 08:56:12 1910-12-27 14:10:00 1970-01-01 00:00:08 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1970-01-01 00:00:08 NULL 1999-07-01 22:14:06 2008-03-13 09:07:30 1970-01-01 00:00:08 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1969-12-31 16:00:15.892 NULL NULL
+1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1919-02-22 21:13:20 1969-12-31 23:59:01 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:13.15 NULL NULL
+1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 2018-11-17 04:30:00 1969-12-31 23:59:39 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:55.9 NULL NULL
+1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 2018-01-18 22:32:30 1969-12-31 23:59:00 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:52.408 NULL NULL
+1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1920-10-24 17:28:20 1969-12-31 23:59:46 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:11.065 NULL NULL
+1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1933-12-12 13:05:00 1970-01-01 00:00:59 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:10.956 NULL NULL
+1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1911-05-19 01:28:20 1969-12-31 23:59:52 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:03.136 NULL NULL
+1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1937-10-26 06:48:20 1970-01-01 00:00:05 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 16:00:10.973 NULL NULL
+1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1996-04-10 04:36:40 1969-12-31 23:59:36 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:53.145 NULL NULL
+1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1937-04-28 23:05:50 1969-12-31 23:59:10 1969-12-31 22:00:04 NULL 1970-01-01 00:00:00 1969-12-31 15:59:54.733 NULL NULL
+1970-01-01 00:00:11 NULL 1967-12-15 03:06:58 2027-02-19 16:15:50 1970-01-01 00:00:11 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1969-12-31 16:00:02.351 NULL NULL
+1970-01-01 00:00:11 NULL 1959-05-16 11:19:43 2009-01-30 14:50:00 1970-01-01 00:00:11 NULL 1970-01-01 00:00:01 1970-01-01 00:00:00 1969-12-31 16:00:02.351 NULL NULL
diff --git a/ql/src/test/results/clientpositive/windowing_distinct.q.out b/ql/src/test/results/clientpositive/windowing_distinct.q.out
index eab1d88705..07bc8a1322 100644
--- a/ql/src/test/results/clientpositive/windowing_distinct.q.out
+++ b/ql/src/test/results/clientpositive/windowing_distinct.q.out
@@ -96,12 +96,12 @@ FROM windowing_distinct
POSTHOOK: type: QUERY
POSTHOOK: Input: default@windowing_distinct
#### A masked pattern was here ####
-54 56.63 0.0 0.0 2.724315837406296E9 57
-54 56.63 0.0 0.0 2.724315837406296E9 57
-54 56.63 0.0 0.0 2.724315837406296E9 57
-235 77.42 0.0 0.0 2.724315837406612E9 69
-235 77.42 0.0 0.0 2.724315837406612E9 69
-235 77.42 0.0 0.0 2.724315837406612E9 69
+54 56.63 0.0 0.0 2.724258237406296E9 57
+54 56.63 0.0 0.0 2.724258237406296E9 57
+54 56.63 0.0 0.0 2.724258237406296E9 57
+235 77.42 0.0 0.0 2.724258237406612E9 69
+235 77.42 0.0 0.0 2.724258237406612E9 69
+235 77.42 0.0 0.0 2.724258237406612E9 69
PREHOOK: query: SELECT AVG(DISTINCT t) OVER (PARTITION BY index),
AVG(DISTINCT d) OVER (PARTITION BY index),
AVG(DISTINCT s) OVER (PARTITION BY index),
@@ -122,12 +122,12 @@ FROM windowing_distinct
POSTHOOK: type: QUERY
POSTHOOK: Input: default@windowing_distinct
#### A masked pattern was here ####
-27.0 28.315 NULL NULL 1.362157918703148E9 28.5000
-27.0 28.315 NULL NULL 1.362157918703148E9 28.5000
-27.0 28.315 NULL NULL 1.362157918703148E9 28.5000
-117.5 38.71 NULL NULL 1.362157918703306E9 34.5000
-117.5 38.71 NULL NULL 1.362157918703306E9 34.5000
-117.5 38.71 NULL NULL 1.362157918703306E9 34.5000
+27.0 28.315 NULL NULL 1.362129118703148E9 28.5000
+27.0 28.315 NULL NULL 1.362129118703148E9 28.5000
+27.0 28.315 NULL NULL 1.362129118703148E9 28.5000
+117.5 38.71 NULL NULL 1.362129118703306E9 34.5000
+117.5 38.71 NULL NULL 1.362129118703306E9 34.5000
+117.5 38.71 NULL NULL 1.362129118703306E9 34.5000
PREHOOK: query: select index, f, count(distinct f) over (partition by index order by f rows between 2 preceding and 1 preceding),
count(distinct f) over (partition by index order by f rows between unbounded preceding and 1 preceding),
count(distinct f) over (partition by index order by f rows between 1 following and 2 following),
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
index 40b2e8e27f..fe00c3ec9a 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/JsonSerDe.java
@@ -21,8 +21,6 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.CharacterCodingException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -34,14 +32,13 @@
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
-import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java b/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java
new file mode 100644
index 0000000000..93605092cf
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/RandomTypeUtil.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+public class RandomTypeUtil {
+
+ public static String getRandString(Random r) {
+ return getRandString(r, null, r.nextInt(10));
+ }
+
+ public static String getRandString(Random r, String characters, int length) {
+ if (characters == null) {
+ characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
+
+ }
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < length; i++) {
+ if (characters == null) {
+ sb.append((char) (r.nextInt(128)));
+ } else {
+ sb.append(characters.charAt(r.nextInt(characters.length())));
+ }
+ }
+ return sb.toString();
+ }
+
+ public static byte[] getRandBinary(Random r, int len){
+ byte[] bytes = new byte[len];
+ for (int j = 0; j < len; j++){
+ bytes[j] = Byte.valueOf((byte) r.nextInt());
+ }
+ return bytes;
+ }
+
+ private static final String DECIMAL_CHARS = "0123456789";
+
+ public static HiveDecimal getRandHiveDecimal(Random r) {
+ int precision;
+ int scale;
+ while (true) {
+ StringBuilder sb = new StringBuilder();
+ precision = 1 + r.nextInt(18);
+ scale = 0 + r.nextInt(precision + 1);
+
+ int integerDigits = precision - scale;
+
+ if (r.nextBoolean()) {
+ sb.append("-");
+ }
+
+ if (integerDigits == 0) {
+ sb.append("0");
+ } else {
+ sb.append(getRandString(r, DECIMAL_CHARS, integerDigits));
+ }
+ if (scale != 0) {
+ sb.append(".");
+ sb.append(getRandString(r, DECIMAL_CHARS, scale));
+ }
+
+ return HiveDecimal.create(sb.toString());
+ }
+ }
+
+ public static Date getRandDate(Random r) {
+ String dateStr = String.format("%d-%02d-%02d",
+ Integer.valueOf(1800 + r.nextInt(500)), // year
+ Integer.valueOf(1 + r.nextInt(12)), // month
+ Integer.valueOf(1 + r.nextInt(28))); // day
+ Date dateVal = Date.valueOf(dateStr);
+ return dateVal;
+ }
+
+ /**
+ * TIMESTAMP.
+ */
+
+ public static final long NANOSECONDS_PER_SECOND = TimeUnit.SECONDS.toNanos(1);
+ public static final long MILLISECONDS_PER_SECOND = TimeUnit.SECONDS.toMillis(1);
+ public static final long NANOSECONDS_PER_MILLISSECOND = TimeUnit.MILLISECONDS.toNanos(1);
+
+ private static final ThreadLocal<DateFormat> DATE_FORMAT =
+ new ThreadLocal<DateFormat>() {
+ @Override
+ protected DateFormat initialValue() {
+ return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ }
+ };
+
+ // We've switched to Joda/Java Calendar which has a more limited time range....
+ public static final int MIN_YEAR = 1900;
+ public static final int MAX_YEAR = 3000;
+ private static final long MIN_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("1900-01-01 00:00:00");
+ private static final long MAX_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("3000-01-01 00:00:00");
+
+ private static long parseToMillis(String s) {
+ try {
+ return DATE_FORMAT.get().parse(s).getTime();
+ } catch (ParseException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+
+ public static Timestamp getRandTimestamp(Random r) {
+ return getRandTimestamp(r, MIN_YEAR, MAX_YEAR);
+ }
+
+ public static Timestamp getRandTimestamp(Random r, int minYear, int maxYear) {
+ String optionalNanos = "";
+ switch (r.nextInt(4)) {
+ case 0:
+ // No nanos.
+ break;
+ case 1:
+ optionalNanos = String.format(".%09d",
+ Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_SECOND)));
+ break;
+ case 2:
+ // Limit to milliseconds only...
+ optionalNanos = String.format(".%09d",
+ Integer.valueOf(r.nextInt((int) MILLISECONDS_PER_SECOND)) * NANOSECONDS_PER_MILLISSECOND);
+ break;
+ case 3:
+ // Limit to below milliseconds only...
+ optionalNanos = String.format(".%09d",
+ Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_MILLISSECOND)));
+ break;
+ }
+ String timestampStr = String.format("%04d-%02d-%02d %02d:%02d:%02d%s",
+ Integer.valueOf(minYear + r.nextInt(maxYear - minYear + 1)), // year
+ Integer.valueOf(1 + r.nextInt(12)), // month
+ Integer.valueOf(1 + r.nextInt(28)), // day
+ Integer.valueOf(0 + r.nextInt(24)), // hour
+ Integer.valueOf(0 + r.nextInt(60)), // minute
+ Integer.valueOf(0 + r.nextInt(60)), // second
+ optionalNanos);
+ Timestamp timestampVal;
+ try {
+ timestampVal = Timestamp.valueOf(timestampStr);
+ } catch (Exception e) {
+ System.err.println("Timestamp string " + timestampStr + " did not parse");
+ throw e;
+ }
+ return timestampVal;
+ }
+
+ public static long randomMillis(long minMillis, long maxMillis, Random rand) {
+ return minMillis + (long) ((maxMillis - minMillis) * rand.nextDouble());
+ }
+
+ public static long randomMillis(Random rand) {
+ return randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
+ }
+
+ public static int randomNanos(Random rand, int decimalDigits) {
+ // Only keep the most significant decimalDigits digits.
+ int nanos = rand.nextInt((int) NANOSECONDS_PER_SECOND);
+ return nanos - nanos % (int) Math.pow(10, 9 - decimalDigits);
+ }
+
+ public static int randomNanos(Random rand) {
+ return randomNanos(rand, 9);
+ }
+}
\ No newline at end of file
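
A minimal usage sketch for the new RandomTypeUtil helpers above (illustrative only, not part of the patch; the class name and seed are assumptions):

    // Sketch: drive the random-value helpers from a test; seed fixed for reproducibility.
    import java.util.Random;
    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.RandomTypeUtil;

    public class RandomTypeUtilSketch {
      public static void main(String[] args) {
        Random r = new Random(1234L);                            // fixed seed keeps generated test data reproducible
        Date d = RandomTypeUtil.getRandDate(r);                  // year drawn from 1800..2299
        Timestamp ts = RandomTypeUtil.getRandTimestamp(r);       // year drawn from MIN_YEAR..MAX_YEAR, optional nanos suffix
        HiveDecimal dec = RandomTypeUtil.getRandHiveDecimal(r);  // precision 1..18 with a random scale
        System.out.println(d + " " + ts + " " + dec);
      }
    }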
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
index e1ecdc1d40..71a9cfcab0 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hive.serde2;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -26,6 +24,8 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index 34da50de83..8cdc567dee 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -22,8 +22,6 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.rmi.server.UID;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -42,12 +40,14 @@
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.UnresolvedUnionException;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -295,13 +295,13 @@ private Object deserializePrimitive(Object datum, Schema fileSchema, Schema reco
throw new AvroSerdeException("Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
}
- return new Date(DateWritable.daysToMillis((Integer)datum));
+ return Date.ofEpochMilli(DateWritableV2.daysToMillis((Integer)datum));
case TIMESTAMP:
if (recordSchema.getType() != Type.LONG) {
throw new AvroSerdeException(
"Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
}
- return new Timestamp((Long)datum);
+ return Timestamp.ofEpochMilli((Long)datum);
default:
return datum;
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index b4c9c22b30..99a0b9a487 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -17,8 +17,6 @@
*/
package org.apache.hadoop.hive.serde2.avro;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
@@ -28,16 +26,15 @@
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
-import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.Fixed;
import org.apache.avro.generic.GenericEnumSymbol;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -210,11 +207,11 @@ private Object serializePrimitive(TypeInfo typeInfo, PrimitiveObjectInspector fi
return vc.getValue();
case DATE:
Date date = ((DateObjectInspector)fieldOI).getPrimitiveJavaObject(structFieldData);
- return DateWritable.dateToDays(date);
+ return DateWritableV2.dateToDays(date);
case TIMESTAMP:
Timestamp timestamp =
((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData);
- return timestamp.getTime();
+ return timestamp.toEpochMilli();
case UNKNOWN:
throw new AvroSerdeException("Received UNKNOWN primitive category.");
case VOID:
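
For orientation (illustrative only, not part of the patch): the two Avro hunks above map DATE to an epoch-day int and TIMESTAMP to an epoch-millis long. A minimal round-trip sketch with assumed sample values:

    // Sketch: the deserializer direction uses daysToMillis/ofEpochMilli, the serializer
    // direction uses dateToDays/toEpochMilli; the literal values below are made up.
    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    public class AvroDateTimestampSketch {
      public static void main(String[] args) {
        int avroDateDays = 16544;                                               // DATE as written by AvroSerializer
        Date d = Date.ofEpochMilli(DateWritableV2.daysToMillis(avroDateDays));  // AvroDeserializer direction
        int daysBack = DateWritableV2.dateToDays(d);                            // AvroSerializer direction

        long avroTimestampMillis = 1429848650000L;                              // TIMESTAMP as written by AvroSerializer
        Timestamp ts = Timestamp.ofEpochMilli(avroTimestampMillis);             // AvroDeserializer direction
        long millisBack = ts.toEpochMilli();                                    // AvroSerializer direction

        System.out.println(d + " " + (daysBack == avroDateDays) + " "
            + ts + " " + (millisBack == avroTimestampMillis));
      }
    }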
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index a48d4fe9cf..2ee89693c3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -43,7 +43,7 @@
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -407,8 +407,8 @@ static Object deserialize(InputByteBuffer buffer, TypeInfo type,
}
case DATE: {
- DateWritable d = reuse == null ? new DateWritable()
- : (DateWritable) reuse;
+ DateWritableV2 d = reuse == null ? new DateWritableV2()
+ : (DateWritableV2) reuse;
d.set(deserializeInt(buffer, invert));
return d;
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
index 2f987bf1af..3cd16149eb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/fast/BinarySortableSerializeWrite.java
@@ -19,21 +19,21 @@
package org.apache.hadoop.hive.serde2.binarysortable.fast;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.slf4j.Logger;
@@ -262,7 +262,7 @@ public void writeBinary(byte[] v, int start, int length) {
@Override
public void writeDate(Date date) throws IOException {
beginElement();
- BinarySortableSerDe.serializeInt(output, DateWritable.dateToDays(date), columnSortOrderIsDesc[index]);
+ BinarySortableSerDe.serializeInt(output, DateWritableV2.dateToDays(date), columnSortOrderIsDesc[index]);
}
// We provide a faster way to write a date without a Date object.
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
index 197031de11..389f54e339 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/DeserializeRead.java
@@ -22,7 +22,7 @@
import java.util.Arrays;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
@@ -75,7 +75,7 @@ private void allocateCurrentWritable(TypeInfo typeInfo) {
switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
case DATE:
if (currentDateWritable == null) {
- currentDateWritable = new DateWritable();
+ currentDateWritable = new DateWritableV2();
}
break;
case TIMESTAMP:
@@ -343,7 +343,7 @@ public void copyToExternalBuffer(byte[] externalBuffer, int externalBufferStart)
/*
* DATE.
*/
- public DateWritable currentDateWritable;
+ public DateWritableV2 currentDateWritable;
/*
* TIMESTAMP.
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
index 3aff6106eb..4d4717fade 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/fast/SerializeWrite.java
@@ -19,17 +19,17 @@
package org.apache.hadoop.hive.serde2.fast;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
/*
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java
new file mode 100644
index 0000000000..f57566111d
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritableV2.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.WritableUtils;
+
+
+/**
+ * DateWritableV2
+ * Writable equivalent of org.apache.hadoop.hive.common.type.Date.
+ *
+ * Dates are of the format
+ * YYYY-MM-DD
+ *
+ */
+public class DateWritableV2 implements WritableComparable<DateWritableV2> {
+
+ private Date date = new Date();
+
+ /* Constructors */
+ public DateWritableV2() {
+ }
+
+ public DateWritableV2(DateWritableV2 d) {
+ set(d);
+ }
+
+ public DateWritableV2(Date d) {
+ set(d);
+ }
+
+ public DateWritableV2(int d) {
+ set(d);
+ }
+
+ /**
+ * Set the DateWritableV2 based on the days since epoch date.
+ * @param d integer value representing days since epoch date
+ */
+ public void set(int d) {
+ date = Date.ofEpochDay(d);
+ }
+
+ /**
+ * Set the DateWritableV2 based on the year/month/day of the date in the local timezone.
+ * @param d Date value
+ */
+ public void set(Date d) {
+ if (d == null) {
+ date = new Date();
+ return;
+ }
+
+ set(d.toEpochDay());
+ }
+
+ public void set(DateWritableV2 d) {
+ set(d.getDays());
+ }
+
+ /**
+ * @return Date value corresponding to the date in the local time zone
+ */
+ public Date get() {
+ return date;
+ }
+
+ public int getDays() {
+ return (int) date.getLocalDate().toEpochDay();
+ }
+
+ /**
+ *
+ * @return time in seconds corresponding to this DateWritableV2
+ */
+ public long getTimeInSeconds() {
+ return date.toEpochSecond();
+ }
+
+ public static Date timeToDate(long seconds) {
+ return Date.ofEpochMilli(seconds * 1000);
+ }
+
+ public static long daysToMillis(int days) {
+ return Date.ofEpochDay(days).toEpochMilli();
+ }
+
+ public static int millisToDays(long millis) {
+ return Date.ofEpochMilli(millis).toEpochDay();
+ }
+
+ public static int dateToDays(Date d) {
+ return d.toEpochDay();
+ }
+
+ @Deprecated
+ public static int dateToDays(java.sql.Date d) {
+ return Date.ofEpochMilli(d.getTime()).toEpochDay();
+ }
+
+ @Override
+ public void readFields(DataInput in) throws IOException {
+ date.setTimeInDays(WritableUtils.readVInt(in));
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ WritableUtils.writeVInt(out, (int) date.toEpochDay());
+ }
+
+ @Override
+ public int compareTo(DateWritableV2 d) {
+ return date.compareTo(d.date);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof DateWritableV2)) {
+ return false;
+ }
+ return compareTo((DateWritableV2) o) == 0;
+ }
+
+ @Override
+ public String toString() {
+ return date.toString();
+ }
+
+ @Override
+ public int hashCode() {
+ return date.hashCode();
+ }
+}
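
A minimal sketch of the epoch-day representation DateWritableV2 serializes (illustrative only, not part of the patch; the class name is an assumption):

    // Sketch: DateWritableV2 stores a Hive Date and writes it as a single VInt of epoch days.
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;

    public class DateWritableV2Sketch {
      public static void main(String[] args) throws IOException {
        DateWritableV2 dw = new DateWritableV2(Date.valueOf("2015-04-24"));
        int days = dw.getDays();                          // days since 1970-01-01, the wire representation
        long millis = DateWritableV2.daysToMillis(days);  // epoch millis at the start of that day

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        dw.write(new DataOutputStream(bos));              // serialize the VInt
        DateWritableV2 copy = new DateWritableV2();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(days + " " + millis + " " + copy.get());  // copy.get() prints 2015-04-24
      }
    }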
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
index 600f922e57..93daa0fba1 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
@@ -20,12 +20,11 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.sql.Timestamp;
import java.time.format.DateTimeFormatter;
-import java.util.Date;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.ql.util.TimestampUtils;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampUtils;
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
@@ -34,7 +33,6 @@
/**
* TimestampWritable
- * Writable equivalent of java.sq.Timestamp
*
* Timestamps are of the format
* YYYY-MM-DD HH:MM:SS.[fff...]
@@ -67,7 +65,7 @@
public static final DateTimeFormatter DATE_TIME_FORMAT = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
- private Timestamp timestamp = new Timestamp(0);
+ private Timestamp timestamp = new Timestamp();
/**
* true if data is stored in timestamp field rather than byte arrays.
@@ -113,20 +111,12 @@ public void set(byte[] bytes, int offset) {
clearTimestamp();
}
- public void setTime(long time) {
- timestamp.setTime(time);
- bytesEmpty = true;
- timestampEmpty = false;
- }
-
public void set(Timestamp t) {
if (t == null) {
- timestamp.setTime(0);
- timestamp.setNanos(0);
+ timestamp.setLocalDateTime(null);
return;
}
- timestamp.setTime(t.getTime());
- timestamp.setNanos(t.getNanos());
+ timestamp.setLocalDateTime(t.getLocalDateTime());
bytesEmpty = true;
timestampEmpty = false;
}
@@ -144,12 +134,10 @@ public void set(TimestampWritable t) {
}
public static void updateTimestamp(Timestamp timestamp, long secondsAsMillis, int nanos) {
- ((Date) timestamp).setTime(secondsAsMillis);
- timestamp.setNanos(nanos);
+ timestamp.setTimeInMillis(secondsAsMillis, nanos);
}
public void setInternal(long secondsAsMillis, int nanos) {
-
// This is our way of documenting that we are MUTATING the contents of
// this writable's internal timestamp.
updateTimestamp(timestamp, secondsAsMillis, nanos);
@@ -173,7 +161,7 @@ public void writeToByteStream(RandomAccessOutput byteStream) {
*/
public long getSeconds() {
if (!timestampEmpty) {
- return TimestampUtils.millisToSeconds(timestamp.getTime());
+ return timestamp.toEpochSecond();
} else if (!bytesEmpty) {
return TimestampWritable.getSeconds(currentBytes, offset);
} else {
@@ -305,7 +293,7 @@ private void checkBytes() {
public double getDouble() {
double seconds, nanos;
if (bytesEmpty) {
- seconds = TimestampUtils.millisToSeconds(timestamp.getTime());
+ seconds = timestamp.toEpochSecond();
nanos = timestamp.getNanos();
} else {
seconds = getSeconds();
@@ -315,7 +303,7 @@ public double getDouble() {
}
public static long getLong(Timestamp timestamp) {
- return timestamp.getTime() / 1000;
+ return timestamp.toEpochSecond();
}
public void readFields(DataInput in) throws IOException {
@@ -386,7 +374,17 @@ public String toString() {
return timestamp.toString();
}
- return timestamp.toLocalDateTime().format(DATE_TIME_FORMAT);
+ String timestampString = timestamp.toString();
+ if (timestampString.length() > 19) {
+ if (timestampString.length() == 21) {
+ if (timestampString.substring(19).compareTo(".0") == 0) {
+ return timestamp.getLocalDateTime().format(DATE_TIME_FORMAT);
+ }
+ }
+ return timestamp.getLocalDateTime().format(DATE_TIME_FORMAT) + timestampString.substring(19);
+ }
+
+ return timestamp.getLocalDateTime().format(DATE_TIME_FORMAT);
}
@Override
@@ -400,8 +398,7 @@ public int hashCode() {
private void populateTimestamp() {
long seconds = getSeconds();
int nanos = getNanos();
- timestamp.setTime(seconds * 1000);
- timestamp.setNanos(nanos);
+ timestamp.setTimeInSeconds(seconds, nanos);
}
/** Static methods **/
@@ -461,10 +458,9 @@ public static int getNanos(byte[] bytes, int offset) {
*/
public static void convertTimestampToBytes(Timestamp t, byte[] b,
int offset) {
- long millis = t.getTime();
+ long seconds = t.toEpochSecond();
int nanos = t.getNanos();
- long seconds = TimestampUtils.millisToSeconds(millis);
boolean hasSecondVInt = seconds < 0 || seconds > Integer.MAX_VALUE;
boolean hasDecimal = setNanosBytes(nanos, b, offset+4, hasSecondVInt);
@@ -527,29 +523,32 @@ public static HiveDecimal getHiveDecimal(Timestamp timestamp) {
return result;
}
-
/**
* Converts the time in seconds or milliseconds to a timestamp.
* @param time time in seconds or in milliseconds
* @return the timestamp
*/
public static Timestamp longToTimestamp(long time, boolean intToTimestampInSeconds) {
- // If the time is in seconds, converts it to milliseconds first.
- return new Timestamp(intToTimestampInSeconds ? time * 1000 : time);
+ // If the time is in seconds, use the seconds-based factory; otherwise treat it as milliseconds.
+ if (intToTimestampInSeconds) {
+ return Timestamp.ofEpochSecond(time);
+ }
+ return Timestamp.ofEpochMilli(time);
}
public static void setTimestamp(Timestamp t, byte[] bytes, int offset) {
long seconds = getSeconds(bytes, offset);
- t.setTime(seconds * 1000);
+ int nanos;
if (hasDecimalOrSecondVInt(bytes[offset])) {
- t.setNanos(getNanos(bytes, offset + 4));
+ nanos = getNanos(bytes, offset + 4);
} else {
- t.setNanos(0);
+ nanos = 0;
}
+ t.setTimeInSeconds(seconds, nanos);
}
public static Timestamp createTimestamp(byte[] bytes, int offset) {
- Timestamp t = new Timestamp(0);
+ Timestamp t = new Timestamp();
TimestampWritable.setTimestamp(t, bytes, offset);
return t;
}
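
A minimal sketch of the seconds-vs-milliseconds handling in the updated TimestampWritable helpers above (illustrative only, not part of the patch; the class name and value are assumptions):

    // Sketch: longToTimestamp now picks the seconds- or millis-based factory instead of multiplying.
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    public class TimestampWritableSketch {
      public static void main(String[] args) {
        long value = 1234567890L;
        Timestamp fromSeconds = TimestampWritable.longToTimestamp(value, true);  // value treated as epoch seconds
        Timestamp fromMillis = TimestampWritable.longToTimestamp(value, false);  // value treated as epoch millis

        TimestampWritable tw = new TimestampWritable(fromSeconds);
        System.out.println(tw + " seconds=" + tw.getSeconds());                  // getSeconds() now delegates to toEpochSecond()
        System.out.println(fromMillis + " seconds=" + fromMillis.toEpochSecond());
      }
    }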
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
index c50cd40cd9..e4643397b8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyDate.java
@@ -20,11 +20,11 @@
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
-import java.sql.Date;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyDateObjectInspector;
import org.apache.hadoop.io.Text;
@@ -36,17 +36,17 @@
* YYYY-MM-DD
*
*/
-public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritable> {
+public class LazyDate extends LazyPrimitive<LazyDateObjectInspector, DateWritableV2> {
private static final Logger LOG = LoggerFactory.getLogger(LazyDate.class);
public LazyDate(LazyDateObjectInspector oi) {
super(oi);
- data = new DateWritable();
+ data = new DateWritableV2();
}
public LazyDate(LazyDate copy) {
super(copy);
- data = new DateWritable(copy.data);
+ data = new DateWritableV2(copy.data);
}
/**
@@ -81,7 +81,7 @@ public void init(ByteArrayRef bytes, int start, int length) {
* The Date to write
* @throws IOException
*/
- public static void writeUTF8(OutputStream out, DateWritable d)
+ public static void writeUTF8(OutputStream out, DateWritableV2 d)
throws IOException {
ByteBuffer b = Text.encode(d.toString());
out.write(b.array(), 0, b.limit());
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
index ee801eedb6..730aecd016 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyTimestamp.java
@@ -20,10 +20,10 @@
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
-import java.sql.Timestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyTimestampObjectInspector;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
index 17c0357da6..53b0cc7195 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/VerifyLazy.java
@@ -17,20 +17,20 @@
*/
package org.apache.hadoop.hive.serde2.lazy;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -301,7 +301,7 @@ public static boolean lazyCompare(TypeInfo typeInfo, Object lazyObject, Object e
throw new RuntimeException("Expected LazyDate");
}
Date value = ((LazyDate) primitiveObject).getWritableObject().get();
- Date expected = ((DateWritable) expectedObject).get();
+ Date expected = ((DateWritableV2) expectedObject).get();
if (!value.equals(expected)) {
throw new RuntimeException("Date field mismatch (expected " + expected + " found " + value + ")");
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
index dd88da889c..0274ff76ea 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleDeserializeRead.java
@@ -21,10 +21,10 @@
import java.io.IOException;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
import java.util.Arrays;
import java.util.List;
+import org.apache.hadoop.hive.common.type.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
index 356326cb41..8755bfd361 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/fast/LazySimpleSerializeWrite.java
@@ -20,14 +20,14 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import java.util.Map;
import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.HiveChar;
@@ -35,8 +35,8 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
@@ -77,7 +77,7 @@
private Deque<Integer> indexStack = new ArrayDeque<Integer>();
// For thread safety, we allocate private writable objects for our use only.
- private DateWritable dateWritable;
+ private DateWritableV2 dateWritable;
private TimestampWritable timestampWritable;
private HiveIntervalYearMonthWritable hiveIntervalYearMonthWritable;
private HiveIntervalDayTimeWritable hiveIntervalDayTimeWritable;
@@ -299,7 +299,7 @@ public void writeBinary(byte[] v, int start, int length) throws IOException {
public void writeDate(Date date) throws IOException {
beginPrimitive();
if (dateWritable == null) {
- dateWritable = new DateWritable();
+ dateWritable = new DateWritableV2();
}
dateWritable.set(date);
LazyDate.writeUTF8(output, dateWritable);
@@ -311,7 +311,7 @@ public void writeDate(Date date) throws IOException {
public void writeDate(int dateAsDays) throws IOException {
beginPrimitive();
if (dateWritable == null) {
- dateWritable = new DateWritable();
+ dateWritable = new DateWritableV2();
}
dateWritable.set(dateAsDays);
LazyDate.writeUTF8(output, dateWritable);
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
index 3bc4ff71fd..e356d23d40 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyDateObjectInspector.java
@@ -17,18 +17,17 @@
*/
package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
-import java.sql.Date;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.lazy.LazyDate;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
/**
- * A WritableDateObjectInspector inspects a DateWritable Object.
+ * A WritableDateObjectInspector inspects a DateWritableV2 Object.
*/
public class LazyDateObjectInspector
- extends AbstractPrimitiveLazyObjectInspector<DateWritable>
+ extends AbstractPrimitiveLazyObjectInspector<DateWritableV2>
implements DateObjectInspector {
protected LazyDateObjectInspector() {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
index e0f993e145..dc8b687367 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyTimestampObjectInspector.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive;
-import java.sql.Timestamp;
import java.util.List;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
index 2952e26b93..fbfe961cff 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryDate.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hive.serde2.lazybinary;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;
@@ -29,17 +29,17 @@
*
*/
public class LazyBinaryDate extends
- LazyBinaryPrimitive<WritableDateObjectInspector, DateWritable> {
+ LazyBinaryPrimitive<WritableDateObjectInspector, DateWritableV2> {
static final Logger LOG = LoggerFactory.getLogger(LazyBinaryDate.class);
LazyBinaryDate(WritableDateObjectInspector oi) {
super(oi);
- data = new DateWritable();
+ data = new DateWritableV2();
}
LazyBinaryDate(LazyBinaryDate copy) {
super(copy);
- data = new DateWritable(copy.data);
+ data = new DateWritableV2(copy.data);
}
/**
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 24704a1935..9b2604bab8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -24,6 +24,7 @@
import java.util.Map;
import java.util.Properties;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
import org.slf4j.Logger;
@@ -37,7 +38,6 @@
import org.apache.hadoop.hive.serde2.SerDeSpec;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.SerDeUtils;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
@@ -318,7 +318,7 @@ public BooleanRef(boolean v) {
}
public static void writeDateToByteStream(RandomAccessOutput byteStream,
- DateWritable date) {
+ DateWritableV2 date) {
LazyBinaryUtils.writeVInt(byteStream, date.getDays());
}
@@ -505,7 +505,7 @@ public static void serialize(RandomAccessOutput byteStream, Object obj,
}
case DATE: {
- DateWritable d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
+ DateWritableV2 d = ((DateObjectInspector) poi).getPrimitiveWritableObject(obj);
writeDateToByteStream(byteStream, d);
return;
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
index b328508408..f1a966da63 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe2.java
@@ -27,7 +27,7 @@
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeSpec;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
@@ -337,7 +337,7 @@ void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objIns
@Override
void serialize(RandomAccessOutput byteStream, Object obj, ObjectInspector objInspector,
boolean skipLengthPrefix, BooleanRef warnedOnceNullMapKey) {
- DateWritable d = ((DateObjectInspector) objInspector).getPrimitiveWritableObject(obj);
+ DateWritableV2 d = ((DateObjectInspector) objInspector).getPrimitiveWritableObject(obj);
LazyBinarySerDe.writeDateToByteStream(byteStream, d);
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
index cd4e619e3b..14a0dfdd76 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/fast/LazyBinarySerializeWrite.java
@@ -19,13 +19,12 @@
package org.apache.hadoop.hive.serde2.lazybinary.fast;
import java.io.IOException;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.slf4j.Logger;
@@ -35,8 +34,9 @@
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
@@ -308,7 +308,7 @@ public void writeBinary(byte[] v, int start, int length) throws IOException {
@Override
public void writeDate(Date date) throws IOException {
beginElement();
- writeVInt(DateWritable.dateToDays(date));
+ writeVInt(DateWritableV2.dateToDays(date));
finishElement();
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index a442cb1228..95dbce2506 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -31,6 +31,7 @@
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableTimestampLocalTZObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampLocalTZObjectInspector;
@@ -39,7 +40,6 @@
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
@@ -1112,9 +1112,9 @@ public static int compare(Object o1, ObjectInspector oi1, Object o2,
}
case DATE: {
- DateWritable d1 = ((DateObjectInspector) poi1)
+ DateWritableV2 d1 = ((DateObjectInspector) poi1)
.getPrimitiveWritableObject(o1);
- DateWritable d2 = ((DateObjectInspector) poi2)
+ DateWritableV2 d2 = ((DateObjectInspector) poi2)
.getPrimitiveWritableObject(o2);
return d1.compareTo(d2);
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
index 93a18f7337..f58364c1d7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/DateObjectInspector.java
@@ -17,11 +17,10 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Date;
-
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
/**
@@ -31,7 +30,7 @@
@InterfaceStability.Stable
public interface DateObjectInspector extends PrimitiveObjectInspector {
- DateWritable getPrimitiveWritableObject(Object o);
+ DateWritableV2 getPrimitiveWritableObject(Object o);
Date getPrimitiveJavaObject(Object o);
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
index bd86c2225d..7dc3d0702f 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java
@@ -17,9 +17,8 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Date;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
public class JavaConstantDateObjectInspector extends JavaDateObjectInspector
@@ -36,6 +35,6 @@ public Object getWritableConstantValue() {
if (value==null) {
return null;
}
- return new DateWritable(value);
+ return new DateWritableV2(value);
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
index 2453bc67cb..6bd61ed823 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java
@@ -17,8 +17,7 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
index d93d71992d..4cf0a60b48 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaDateObjectInspector.java
@@ -17,9 +17,8 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Date;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
/**
@@ -33,8 +32,8 @@ protected JavaDateObjectInspector() {
super(TypeInfoFactory.dateTypeInfo);
}
- public DateWritable getPrimitiveWritableObject(Object o) {
- return o == null ? null : new DateWritable((Date) o);
+ public DateWritableV2 getPrimitiveWritableObject(Object o) {
+ return o == null ? null : new DateWritableV2((Date) o);
}
@Override
@@ -50,20 +49,34 @@ public Object set(Object o, Date value) {
if (value == null) {
return null;
}
- ((Date) o).setTime(value.getTime());
+ ((Date) o).setTimeInDays(value.toEpochDay());
return o;
}
- public Object set(Object o, DateWritable d) {
+ @Deprecated
+ public Object set(Object o, java.sql.Date value) {
+ if (value == null) {
+ return null;
+ }
+ ((Date) o).setTimeInMillis(value.getTime());
+ return o;
+ }
+
+ public Object set(Object o, DateWritableV2 d) {
if (d == null) {
return null;
}
- ((Date) o).setTime(d.get().getTime());
+ ((Date) o).setTimeInDays(d.get().toEpochDay());
return o;
}
+ @Deprecated
+ public Object create(java.sql.Date value) {
+ return Date.ofEpochMilli(value.getTime());
+ }
+
public Object create(Date value) {
- return new Date(value.getTime());
+ return Date.ofEpochDay(value.toEpochDay());
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
index 1e805ba974..da366c582e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
@@ -17,8 +17,7 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -45,20 +44,27 @@ public Object copyObject(Object o) {
return null;
}
Timestamp source = (Timestamp) o;
- Timestamp copy = new Timestamp(source.getTime());
- copy.setNanos(source.getNanos());
- return copy;
+ return new Timestamp(source.getLocalDateTime());
}
public Timestamp get(Object o) {
return (Timestamp) o;
}
+ @Deprecated
+ public Object set(Object o, java.sql.Timestamp value) {
+ if (value == null) {
+ return null;
+ }
+ ((Timestamp) o).setTimeInMillis(value.getTime(), value.getNanos());
+ return o;
+ }
+
public Object set(Object o, Timestamp value) {
if (value == null) {
return null;
}
- ((Timestamp) o).setTime(value.getTime());
+ ((Timestamp) o).setLocalDateTime(value.getLocalDateTime());
return o;
}
@@ -72,13 +78,17 @@ public Object set(Object o, TimestampWritable tw) {
return null;
}
Timestamp t = (Timestamp) o;
- t.setTime(tw.getTimestamp().getTime());
- t.setNanos(tw.getTimestamp().getNanos());
+ t.setLocalDateTime(tw.getTimestamp().getLocalDateTime());
return t;
}
+ @Deprecated
+ public Object create(java.sql.Timestamp value) {
+ return Timestamp.ofEpochMilli(value.getTime(), value.getNanos());
+ }
+
public Object create(Timestamp value) {
- return new Timestamp(value.getTime());
+ return new Timestamp(value.getLocalDateTime());
}
public Object create(byte[] bytes, int offset) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
index ba20a2c56d..1e12ccaf3e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java
@@ -18,15 +18,15 @@
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.time.ZoneId;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZ;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
@@ -255,7 +255,7 @@ public DateConverter(PrimitiveObjectInspector inputOI,
SettableDateObjectInspector outputOI) {
this.inputOI = inputOI;
this.outputOI = outputOI;
- r = outputOI.create(new Date(0));
+ r = outputOI.create(new Date());
}
public Object convert(Object input) {
@@ -277,7 +277,7 @@ public TimestampConverter(PrimitiveObjectInspector inputOI,
SettableTimestampObjectInspector outputOI) {
this.inputOI = inputOI;
this.outputOI = outputOI;
- r = outputOI.create(new Timestamp(0));
+ r = outputOI.create(new Timestamp());
}
public void setIntToTimestampInSeconds(boolean intToTimestampInSeconds) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
index 10af3dc0f7..a0410559fc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
@@ -24,7 +24,7 @@
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -346,7 +346,7 @@ public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspecto
return new WritableConstantHiveVarcharObjectInspector((VarcharTypeInfo)typeInfo,
(HiveVarcharWritable)value);
case DATE:
- return new WritableConstantDateObjectInspector((DateWritable)value);
+ return new WritableConstantDateObjectInspector((DateWritableV2)value);
case TIMESTAMP:
return new WritableConstantTimestampObjectInspector((TimestampWritable)value);
case TIMESTAMPLOCALTZ:
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
index 8cf0744529..5052d24d41 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java
@@ -23,8 +23,6 @@
import java.io.IOException;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.time.DateTimeException;
import java.time.ZoneId;
import java.util.HashMap;
@@ -32,18 +30,19 @@
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.common.type.TimestampTZ;
-import org.apache.hadoop.hive.common.type.TimestampTZUtil;
-import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.common.type.TimestampTZ;
+import org.apache.hadoop.hive.common.type.TimestampTZUtil;
+import org.apache.hadoop.hive.common.type.TimestampUtils;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -51,6 +50,7 @@
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampLocalTZWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
@@ -229,7 +229,7 @@ static void registerType(PrimitiveTypeEntry t) {
Short.class, ShortWritable.class);
public static final PrimitiveTypeEntry dateTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.DATE, serdeConstants.DATE_TYPE_NAME, null,
- Date.class, DateWritable.class);
+ Date.class, DateWritableV2.class);
public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry(
PrimitiveCategory.TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME, null,
Timestamp.class, TimestampWritable.class);
@@ -1126,7 +1126,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) {
} catch (IllegalArgumentException e) {
Timestamp ts = getTimestampFromString(s);
if (ts != null) {
- result = new Date(ts.getTime());
+ result = Date.ofEpochMilli(ts.toEpochMilli());
} else {
result = null;
}
@@ -1140,7 +1140,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) {
} catch (IllegalArgumentException e) {
Timestamp ts = getTimestampFromString(val);
if (ts != null) {
- result = new Date(ts.getTime());
+ result = Date.ofEpochMilli(ts.toEpochMilli());
} else {
result = null;
}
@@ -1151,7 +1151,7 @@ public static Date getDate(Object o, PrimitiveObjectInspector oi) {
result = ((DateObjectInspector) oi).getPrimitiveWritableObject(o).get();
break;
case TIMESTAMP:
- result = DateWritable.timeToDate(
+ result = DateWritableV2.timeToDate(
((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getSeconds());
break;
case TIMESTAMPLOCALTZ:
@@ -1212,8 +1212,8 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI,
result = TimestampUtils.doubleToTimestamp(((DoubleObjectInspector) inputOI).get(o));
break;
case DECIMAL:
- result = TimestampUtils.decimalToTimestamp(((HiveDecimalObjectInspector) inputOI)
- .getPrimitiveJavaObject(o));
+ result = TimestampUtils.decimalToTimestamp(
+ ((HiveDecimalObjectInspector) inputOI).getPrimitiveJavaObject(o));
break;
case STRING:
StringObjectInspector soi = (StringObjectInspector) inputOI;
@@ -1225,8 +1225,8 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI,
result = getTimestampFromString(getString(o, inputOI));
break;
case DATE:
- result = new Timestamp(
- ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().getTime());
+ result = Timestamp.ofEpochMilli(
+ ((DateObjectInspector) inputOI).getPrimitiveWritableObject(o).get().toEpochMilli());
break;
case TIMESTAMP:
result = ((TimestampObjectInspector) inputOI).getPrimitiveWritableObject(o).getTimestamp();
@@ -1247,21 +1247,17 @@ public static Timestamp getTimestamp(Object o, PrimitiveObjectInspector inputOI,
return result;
}
- static Timestamp getTimestampFromString(String s) {
+ public static Timestamp getTimestampFromString(String s) {
Timestamp result;
s = s.trim();
s = trimNanoTimestamp(s);
- int firstSpace = s.indexOf(' ');
- if (firstSpace < 0) {
- s = s.concat(" 00:00:00");
- }
try {
result = Timestamp.valueOf(s);
} catch (IllegalArgumentException e) {
// Let's try to parse it as timestamp with time zone and transform
try {
- result = Timestamp.from(TimestampTZUtil.parse(s).getZonedDateTime().toInstant());
+ result = new Timestamp(TimestampTZUtil.parse(s).getZonedDateTime().toLocalDateTime());
} catch (DateTimeException e2) {
result = null;
}
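
Note: getTimestampFromString is now public and no longer pads date-only strings with " 00:00:00", presumably because the new Timestamp.valueOf accepts them directly; the zoned fallback now keeps only the wall-clock part of the parsed value. A minimal java.time-only sketch of the same parse-then-fall-back shape follows; the pattern and class name are assumptions, and in the real code TimestampTZUtil handles Hive's own zoned syntax.

import java.time.LocalDateTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;

class TimestampParseSketch {
  private static final DateTimeFormatter LOCAL =
      DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

  // Try a plain local date-time first, then an ISO zoned literal
  // (e.g. 2014-01-01T00:00:00Z), keeping only the wall-clock part.
  static LocalDateTime parseOrNull(String s) {
    try {
      return LocalDateTime.parse(s.trim(), LOCAL);
    } catch (DateTimeParseException e) {
      try {
        return ZonedDateTime.parse(s.trim()).toLocalDateTime();
      } catch (DateTimeParseException e2) {
        return null;
      }
    }
  }
}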
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
index 831411defe..725d5cd19b 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableDateObjectInspector.java
@@ -17,17 +17,22 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Date;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
/**
* A SettableDateObjectInspector can set a Date value to an object.
*/
public interface SettableDateObjectInspector extends DateObjectInspector {
+ @Deprecated
+ Object set(Object o, java.sql.Date d);
+
Object set(Object o, Date d);
- Object set(Object o, DateWritable d);
+ Object set(Object o, DateWritableV2 d);
+
+ @Deprecated
+ Object create(java.sql.Date d);
Object create(Date d);
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
index c676a62266..be73ac91eb 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableTimestampObjectInspector.java
@@ -17,8 +17,7 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -26,11 +25,17 @@
Object set(Object o, byte[] bytes, int offset);
+ @Deprecated
+ Object set(Object o, java.sql.Timestamp t);
+
Object set(Object o, Timestamp t);
Object set(Object o, TimestampWritable t);
Object create(byte[] bytes, int offset);
+ @Deprecated
+ Object create (java.sql.Timestamp t);
+
Object create (Timestamp t);
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
index f277232960..cabc4e8988 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/TimestampObjectInspector.java
@@ -17,10 +17,9 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Timestamp;
-
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
index 290fcd3ca3..7e6cc8d155 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantDateObjectInspector.java
@@ -17,10 +17,9 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
/**
* A WritableConstantDateObjectInspector is a WritableDateObjectInspector
* that implements ConstantObjectInspector.
@@ -29,18 +28,18 @@
WritableDateObjectInspector implements
ConstantObjectInspector {
- private DateWritable value;
+ private DateWritableV2 value;
protected WritableConstantDateObjectInspector() {
super();
}
- WritableConstantDateObjectInspector(DateWritable value) {
+ WritableConstantDateObjectInspector(DateWritableV2 value) {
super();
this.value = value;
}
@Override
- public DateWritable getWritableConstantValue() {
+ public DateWritableV2 getWritableConstantValue() {
return value;
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
index 5d73806844..6a96dddca9 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Date;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
/**
- * A WritableDateObjectInspector inspects a DateWritable Object.
+ * A WritableDateObjectInspector inspects a DateWritableV2 Object.
*/
public class WritableDateObjectInspector extends
AbstractPrimitiveWritableObjectInspector implements
@@ -34,35 +33,49 @@ public WritableDateObjectInspector() {
}
@Override
- public DateWritable getPrimitiveWritableObject(Object o) {
- return o == null ? null : (DateWritable) o;
+ public DateWritableV2 getPrimitiveWritableObject(Object o) {
+ return o == null ? null : (DateWritableV2) o;
}
public Date getPrimitiveJavaObject(Object o) {
- return o == null ? null : ((DateWritable) o).get();
+ return o == null ? null : ((DateWritableV2) o).get();
}
public Object copyObject(Object o) {
- return o == null ? null : new DateWritable((DateWritable) o);
+ return o == null ? null : new DateWritableV2((DateWritableV2) o);
}
public Object set(Object o, Date d) {
if (d == null) {
return null;
}
- ((DateWritable) o).set(d);
+ ((DateWritableV2) o).set(d);
return o;
}
- public Object set(Object o, DateWritable d) {
+ @Deprecated
+ public Object set(Object o, java.sql.Date d) {
if (d == null) {
return null;
}
- ((DateWritable) o).set(d);
+ ((DateWritableV2) o).set(Date.ofEpochMilli(d.getTime()));
return o;
}
+ public Object set(Object o, DateWritableV2 d) {
+ if (d == null) {
+ return null;
+ }
+ ((DateWritableV2) o).set(d);
+ return o;
+ }
+
+ @Deprecated
+ public Object create(java.sql.Date value) {
+ return new DateWritableV2(Date.ofEpochMilli(value.getTime()));
+ }
+
public Object create(Date d) {
- return new DateWritable(d);
+ return new DateWritableV2(d);
}
}
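
Note: as with the timestamp inspectors, the deprecated java.sql.Date overloads bridge through epoch milliseconds. A small sketch under that assumption; the toSql helper is hypothetical and ignores the calendar and time-zone differences that motivated the new Date type.

import org.apache.hadoop.hive.common.type.Date;

public class DateBridgeSketch {
  // Legacy -> new, exactly as the deprecated overloads above do it.
  static Date fromSql(java.sql.Date legacy) {
    return Date.ofEpochMilli(legacy.getTime());
  }

  // New -> legacy (hypothetical helper), using toEpochMilli from this patch.
  static java.sql.Date toSql(Date d) {
    return new java.sql.Date(d.toEpochMilli());
  }
}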
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
index 47b51f5da8..5105c32189 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
@@ -17,8 +17,7 @@
*/
package org.apache.hadoop.hive.serde2.objectinspector.primitive;
-import java.sql.Timestamp;
-
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -48,6 +47,15 @@ public Object set(Object o, byte[] bytes, int offset) {
return o;
}
+ @Deprecated
+ public Object set(Object o, java.sql.Timestamp t) {
+ if (t == null) {
+ return null;
+ }
+ ((TimestampWritable) o).set(Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ return o;
+ }
+
public Object set(Object o, Timestamp t) {
if (t == null) {
return null;
@@ -68,6 +76,10 @@ public Object create(byte[] bytes, int offset) {
return new TimestampWritable(bytes, offset);
}
+ public Object create(java.sql.Timestamp t) {
+ return new TimestampWritable(Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
+ }
+
public Object create(Timestamp t) {
return new TimestampWritable(t);
}
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
index 749d8accf7..c0f9726d99 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
@@ -18,19 +18,18 @@
package org.apache.hadoop.hive.serde2;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
index 22aadbb843..ffb7c40226 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
@@ -19,8 +19,6 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -29,16 +27,18 @@
import junit.framework.TestCase;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -244,7 +244,7 @@ public static void doVerifyDeserializeRead(DeserializeRead deserializeRead,
case DATE:
{
Date value = deserializeRead.currentDateWritable.get();
- Date expected = ((DateWritable) object).get();
+ Date expected = ((DateWritableV2) object).get();
if (!value.equals(expected)) {
TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
}
@@ -394,7 +394,7 @@ public static void serializeWrite(SerializeWrite serializeWrite,
break;
case DATE:
{
- Date value = ((DateWritable) object).get();
+ Date value = ((DateWritableV2) object).get();
serializeWrite.writeDate(value);
}
break;
@@ -571,7 +571,7 @@ private static Object doReadComplexPrimitiveField(DeserializeRead deserializeRea
case DECIMAL:
return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
case DATE:
- return new DateWritable(deserializeRead.currentDateWritable);
+ return new DateWritableV2(deserializeRead.currentDateWritable);
case TIMESTAMP:
return new TimestampWritable(deserializeRead.currentTimestampWritable);
case INTERVAL_YEAR_MONTH:
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
index c270d71470..a486ab1931 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
@@ -17,18 +17,18 @@
*/
package org.apache.hadoop.hive.serde2.binarysortable;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
index 82d126a428..6febc36b1b 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
@@ -17,26 +17,21 @@
*/
package org.apache.hadoop.hive.serde2.binarysortable;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
-import junit.framework.TestCase;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.type.HiveBaseChar;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -46,12 +41,12 @@
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.apache.hadoop.io.Writable;
import org.apache.hive.common.util.DateUtils;
+import junit.framework.TestCase;
+
// Just the primitive types.
public class MyTestPrimitiveClass {
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java
similarity index 76%
rename from serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
rename to serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java
index 97eb967096..262e55aafd 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java
@@ -18,29 +18,35 @@
package org.apache.hadoop.hive.serde2.io;
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
+import com.google.code.tempusfugit.concurrency.ConcurrentRule;
+import com.google.code.tempusfugit.concurrency.RepeatingRule;
+import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
+import com.google.code.tempusfugit.concurrency.annotations.Repeating;
+import org.apache.hadoop.hive.common.type.Date;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.junit.Assert.*;
-import java.io.*;
-import java.sql.Date;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
import java.util.Calendar;
-import java.util.GregorianCalendar;
import java.util.LinkedList;
import java.util.TimeZone;
import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-public class TestDateWritable {
- private static final Logger LOG = LoggerFactory.getLogger(TestDateWritable.class);
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestDateWritableV2 {
+ private static final Logger LOG = LoggerFactory.getLogger(TestDateWritableV2.class);
@Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
@Rule public RepeatingRule repeatingRule = new RepeatingRule();
@@ -50,9 +56,9 @@
@Repeating(repetition=100)
public void testConstructor() {
Date date = Date.valueOf(getRandomDateString());
- DateWritable dw1 = new DateWritable(date);
- DateWritable dw2 = new DateWritable(dw1);
- DateWritable dw3 = new DateWritable(dw1.getDays());
+ DateWritableV2 dw1 = new DateWritableV2(date);
+ DateWritableV2 dw2 = new DateWritableV2(dw1);
+ DateWritableV2 dw3 = new DateWritableV2(dw1.getDays());
assertEquals(dw1, dw1);
assertEquals(dw1, dw2);
@@ -73,9 +79,9 @@ public void testComparison() {
date2 = Date.valueOf(getRandomDateString());
}
- DateWritable dw1 = new DateWritable(date1);
- DateWritable dw2 = new DateWritable(date2);
- DateWritable dw3 = new DateWritable(date1);
+ DateWritableV2 dw1 = new DateWritableV2(date1);
+ DateWritableV2 dw2 = new DateWritableV2(date2);
+ DateWritableV2 dw3 = new DateWritableV2(date1);
assertTrue("Dates should be equal", dw1.equals(dw1));
assertTrue("Dates should be equal", dw1.equals(dw3));
@@ -97,14 +103,14 @@ public void testGettersSetters() {
Date date1 = Date.valueOf(getRandomDateString());
Date date2 = Date.valueOf(getRandomDateString());
Date date3 = Date.valueOf(getRandomDateString());
- DateWritable dw1 = new DateWritable(date1);
- DateWritable dw2 = new DateWritable(date2);
- DateWritable dw3 = new DateWritable(date3);
- DateWritable dw4 = new DateWritable();
+ DateWritableV2 dw1 = new DateWritableV2(date1);
+ DateWritableV2 dw2 = new DateWritableV2(date2);
+ DateWritableV2 dw3 = new DateWritableV2(date3);
+ DateWritableV2 dw4 = new DateWritableV2();
// Getters
assertEquals(date1, dw1.get());
- assertEquals(date1.getTime() / 1000, dw1.getTimeInSeconds());
+ assertEquals(date1.toEpochSecond(), dw1.getTimeInSeconds());
dw4.set(Date.valueOf("1970-01-02"));
assertEquals(1, dw4.getDays());
@@ -126,8 +132,8 @@ public void testGettersSetters() {
@Concurrent(count=4)
@Repeating(repetition=100)
public void testWritableMethods() throws Throwable {
- DateWritable dw1 = new DateWritable(Date.valueOf(getRandomDateString()));
- DateWritable dw2 = new DateWritable();
+ DateWritableV2 dw1 = new DateWritableV2(Date.valueOf(getRandomDateString()));
+ DateWritableV2 dw2 = new DateWritableV2();
ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
DataOutput out = new DataOutputStream(byteStream);
@@ -151,12 +157,11 @@ public void testDateValueOf() {
@BeforeClass
public static void setupDateStrings() {
- DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
Date initialDate = Date.valueOf("2014-01-01");
Calendar cal = Calendar.getInstance();
- cal.setTime(initialDate);
+ cal.setTimeInMillis(initialDate.toEpochMilli());
for (int idx = 0; idx < 365; ++idx) {
- dateStrings[idx] = format.format(cal.getTime());
+ dateStrings[idx] = Date.ofEpochMilli(cal.getTimeInMillis()).toString();
cal.add(1, Calendar.DAY_OF_YEAR);
}
}
@@ -176,21 +181,20 @@ public DateTestCallable(LinkedList bad, String tz) {
@Override
public Void call() throws Exception {
- SimpleDateFormat sdf = new SimpleDateFormat("YYYY-MM-dd HH:mm:ss");
- // Iterate through each day of the year, make sure Date/DateWritable match
+ // Iterate through each day of the year, make sure Date/DateWritableV2 match
Date originalDate = Date.valueOf("1900-01-01");
Calendar cal = Calendar.getInstance();
- cal.setTimeInMillis(originalDate.getTime());
+ cal.setTimeInMillis(originalDate.toEpochMilli());
for (int idx = 0; idx < 365*200; ++idx) {
- originalDate = new Date(cal.getTimeInMillis());
+ originalDate = Date.ofEpochMilli(cal.getTimeInMillis());
// Make sure originalDate is at midnight in the local time zone,
- // since DateWritable will generate dates at that time.
+ // since DateWritableV2 will generate dates at that time.
originalDate = Date.valueOf(originalDate.toString());
- DateWritable dateWritable = new DateWritable(originalDate);
- Date actual = dateWritable.get(false);
+ DateWritableV2 dateWritable = new DateWritableV2(originalDate);
+ Date actual = dateWritable.get();
if (!originalDate.equals(actual)) {
- String originalStr = sdf.format(originalDate);
- String actualStr = sdf.format(actual);
+ String originalStr = originalDate.toString();
+ String actualStr = actual.toString();
if (originalStr.substring(0, 10).equals(actualStr.substring(0, 10))) continue;
bad.add(new DtMismatch(originalStr, actualStr, tz));
}
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
index 3fe472ee8b..0074986505 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
@@ -26,7 +26,6 @@
import java.io.DataOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
-import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
@@ -40,6 +39,7 @@
import static org.junit.Assert.*;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
@@ -68,10 +68,10 @@ protected DateFormat initialValue() {
private static long getSeconds(Timestamp ts) {
// To compute seconds, we first subtract the milliseconds stored in the nanos field of the
// Timestamp from the result of getTime().
- long seconds = (ts.getTime() - ts.getNanos() / 1000000) / 1000;
+ long seconds = (ts.toEpochMilli() - ts.getNanos() / 1000000) / 1000;
// It should also be possible to calculate this based on ts.getTime() only.
- assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getTime()));
+ assertEquals(seconds, TimestampUtils.millisToSeconds(ts.toEpochMilli()));
return seconds;
}
@@ -181,9 +181,9 @@ private static TimestampWritable serializeDeserializeAndCheckTimestamp(Timestamp
fromBinSort.setBinarySortable(binarySortableBytes, binarySortableOffset);
assertTSWEquals(tsw, fromBinSort);
- long timeSeconds = ts.getTime() / 1000;
+ long timeSeconds = ts.toEpochSecond();
if (0 <= timeSeconds && timeSeconds <= Integer.MAX_VALUE) {
- assertEquals(new Timestamp(timeSeconds * 1000),
+ assertEquals(Timestamp.ofEpochSecond(timeSeconds),
fromIntAndVInts((int) timeSeconds, 0).getTimestamp());
int nanos = reverseNanos(ts.getNanos());
@@ -288,7 +288,7 @@ public void testTimestampsWithinPositiveIntRange() throws IOException {
Random rand = new Random(294722773L);
for (int i = 0; i < 10000; ++i) {
long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
- checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand));
+ checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand));
}
}
@@ -306,7 +306,7 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException {
Random rand = new Random(789149717L);
for (int i = 0; i < 10000; ++i) {
long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
- checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand));
+ checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand));
}
}
@@ -315,7 +315,7 @@ public void testTimestampsOutsidePositiveIntRange() throws IOException {
public void testTimestampsInFullRange() throws IOException {
Random rand = new Random(2904974913L);
for (int i = 0; i < 10000; ++i) {
- checkTimestampWithAndWithoutNanos(new Timestamp(rand.nextLong()), randomNanos(rand));
+ checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(rand.nextLong()), randomNanos(rand));
}
}
@@ -326,9 +326,8 @@ public void testToFromDouble() {
for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
for (int i = 0; i < 10000; ++i) {
long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
- Timestamp ts = new Timestamp(millis);
int nanos = randomNanos(rand, nanosPrecision);
- ts.setNanos(nanos);
+ Timestamp ts = Timestamp.ofEpochMilli(millis, nanos);
TimestampWritable tsw = new TimestampWritable(ts);
double asDouble = tsw.getDouble();
int recoveredNanos =
@@ -356,7 +355,7 @@ private static HiveDecimal timestampToDecimal(Timestamp ts) {
public void testDecimalToTimestampRandomly() {
Random rand = new Random(294729777L);
for (int i = 0; i < 10000; ++i) {
- Timestamp ts = new Timestamp(
+ Timestamp ts = Timestamp.ofEpochMilli(
randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand));
ts.setNanos(randomNanos(rand, 9)); // full precision
assertEquals(ts, TimestampUtils.decimalToTimestamp(timestampToDecimal(ts)));
@@ -367,8 +366,8 @@ public void testDecimalToTimestampRandomly() {
@Concurrent(count=4)
@Repeating(repetition=100)
public void testDecimalToTimestampCornerCases() {
- Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
- assertEquals(0, ts.getTime() % 1000);
+ Timestamp ts = Timestamp.ofEpochMilli(parseToMillis("1969-03-04 05:44:33"));
+ assertEquals(0, ts.toEpochMilli() % 1000);
for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) {
ts.setNanos(nanos);
HiveDecimal d = timestampToDecimal(ts);
@@ -473,8 +472,7 @@ public void testBinarySortable() {
Random rand = new Random(5972977L);
List tswList = new ArrayList();
for (int i = 0; i < 50; ++i) {
- Timestamp ts = new Timestamp(rand.nextLong());
- ts.setNanos(randomNanos(rand));
+ Timestamp ts = Timestamp.ofEpochMilli(rand.nextLong(), randomNanos(rand));
tswList.add(new TimestampWritable(ts));
}
for (TimestampWritable tsw1 : tswList) {
@@ -509,10 +507,10 @@ public void testSetTimestamp() {
}
private static void verifySetTimestamp(long time) {
- Timestamp t1 = new Timestamp(time);
+ Timestamp t1 = Timestamp.ofEpochMilli(time);
TimestampWritable writable = new TimestampWritable(t1);
byte[] bytes = writable.getBytes();
- Timestamp t2 = new Timestamp(0);
+ Timestamp t2 = new Timestamp();
TimestampWritable.setTimestamp(t2, bytes, 0);
assertEquals(t1, t2);
}
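
Note: the updated getSeconds helper recomputes whole seconds from toEpochMilli() and getNanos() instead of java.sql.Timestamp.getTime(). A worked example with made-up values, showing only the arithmetic:

public class SecondsFromMillisSketch {
  public static void main(String[] args) {
    long epochMilli = 1_234_567L;   // 1234.567 s after the epoch
    int nanos = 567_000_000;        // 0.567 s expressed as nanos-of-second
    long seconds = (epochMilli - nanos / 1_000_000) / 1000;
    System.out.println(seconds);    // 1234, matching TimestampUtils.millisToSeconds(epochMilli)
  }
}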
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
index 6dcc6f8022..398dc5cf68 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
@@ -17,24 +17,23 @@
*/
package org.apache.hadoop.hive.serde2.lazybinary;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
-import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
/**
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
index 732bd42368..49df56c8e1 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
@@ -17,17 +17,16 @@
*/
package org.apache.hadoop.hive.serde2.lazybinary;
-import java.sql.Date;
-import java.sql.Timestamp;
import java.util.Random;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
-import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
index 3b5f3bae90..f11a319eac 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
@@ -81,7 +81,7 @@ public TimestampColumnVector(int len) {
scratchWritable = null; // Allocated by caller.
- isUTC = false;
+ isUTC = true;
}
/**
diff --git a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index b5220a0081..db58549c18 100644
--- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -1602,14 +1602,14 @@ private void generateFilterColumnBetweenDynamicValue(String[] tdesc) throws Exce
vectorType = "long";
getPrimitiveMethod = "getDate";
getValueMethod = "";
- conversionMethod = "DateWritable.dateToDays";
+ conversionMethod = "DateWritableV2.dateToDays";
// Special case - Date requires its own specific BetweenDynamicValue class, but derives from FilterLongColumnBetween
typeName = "Long";
} else if (operandType.equals("timestamp")) {
defaultValue = "new Timestamp(0)";
vectorType = "Timestamp";
getPrimitiveMethod = "getTimestamp";
- getValueMethod = "";
+ getValueMethod = ".toSqlTimestamp()";
conversionMethod = "";
} else {
throw new IllegalArgumentException("Type " + operandType + " not supported");
@@ -3185,7 +3185,7 @@ private String getDTIScalarColumnDisplayBody(String type) {
if (type.equals("date")) {
return
"Date dt = new Date(0);" +
- " dt.setTime(DateWritable.daysToMillis((int) value));\n" +
+ " dt.setTime(DateWritableV2.daysToMillis((int) value));\n" +
" return \"date \" + dt.toString() + \", \" + getColumnParamString(0, colNum);";
} else {
return
@@ -3197,7 +3197,7 @@ private String getDTIColumnScalarDisplayBody(String type) {
if (type.equals("date")) {
return
"Date dt = new Date(0);" +
- " dt.setTime(DateWritable.daysToMillis((int) value));\n" +
+ " dt.setTime(DateWritableV2.daysToMillis((int) value));\n" +
" return getColumnParamString(0, colNum) + \", date \" + dt.toString();";
} else {
return
@@ -3807,9 +3807,9 @@ private String getOutputWritableType(String primitiveType) throws Exception {
} else if (primitiveType.equals("interval_day_time")) {
return "HiveIntervalDayTimeWritable";
} else if (primitiveType.equals("date")) {
- return "HiveDateWritable";
+ return "DateWritableV2";
} else if (primitiveType.equals("timestamp")) {
- return "HiveTimestampWritable";
+ return "TimestampWritable";
}
throw new Exception("Unimplemented primitive output writable: " + primitiveType);
}
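
Note: GenVectorCode now appends ".toSqlTimestamp()" as the timestamp getValueMethod because the storage-api column vectors still operate on java.sql.Timestamp, so values held as the new Hive Timestamp are converted once at that boundary. A hedged illustration using only methods referenced in this patch:

import org.apache.hadoop.hive.common.type.Timestamp;

public class VectorBoundarySketch {
  public static void main(String[] args) {
    Timestamp hiveTs = Timestamp.ofEpochMilli(System.currentTimeMillis());
    java.sql.Timestamp forVector = hiveTs.toSqlTimestamp(); // method name taken from this patch
    System.out.println(forVector);
  }
}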