diff --git a/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java b/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
new file mode 100644
index 0000000..563992c
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
@@ -0,0 +1,230 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.math.BigDecimal;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.hive.common.util.DateTimeUtils;
+
+/**
+ * Day-time interval type representing an offset in days/hours/minutes/seconds,
+ * with nanosecond precision.
+ * 1 day = 24 hours = 1440 minutes = 86400 seconds
+ */
+public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
+
+  // days/hours/minutes/seconds all represented as seconds
+  protected long totalSeconds;
+  protected int nanos;
+
+  public HiveIntervalDayTime() {
+  }
+
+  public HiveIntervalDayTime(int days, int hours, int minutes, int seconds, int nanos) {
+    set(days, hours, minutes, seconds, nanos);
+  }
+
+  public HiveIntervalDayTime(long seconds, int nanos) {
+    set(seconds, nanos);
+  }
+
+  public HiveIntervalDayTime(BigDecimal seconds) {
+    set(seconds);
+  }
+
+  public HiveIntervalDayTime(HiveIntervalDayTime other) {
+    set(other.totalSeconds, other.nanos);
+  }
+
+  public int getDays() {
+    return (int) TimeUnit.SECONDS.toDays(totalSeconds);
+  }
+
+  public int getHours() {
+    return (int) (TimeUnit.SECONDS.toHours(totalSeconds) % TimeUnit.DAYS.toHours(1));
+  }
+
+  public int getMinutes() {
+    return (int) (TimeUnit.SECONDS.toMinutes(totalSeconds) % TimeUnit.HOURS.toMinutes(1));
+  }
+
+  public int getSeconds() {
+    return (int) (totalSeconds % TimeUnit.MINUTES.toSeconds(1));
+  }
+
+  public int getNanos() {
+    return nanos;
+  }
+
+  /**
+   * Returns days/hours/minutes all converted into seconds.
+   * Nanos still need to be retrieved using getNanos()
+   * @return total number of seconds represented by the days/hours/minutes/seconds fields
+   */
+  public long getTotalSeconds() {
+    return totalSeconds;
+  }
+
+  /**
+   * Ensures that the seconds and nanoseconds fields have consistent sign
+   */
+  protected void normalizeSecondsAndNanos() {
+    if (totalSeconds > 0 && nanos < 0) {
+      --totalSeconds;
+      nanos += DateTimeUtils.NANOS_PER_SEC;
+    } else if (totalSeconds < 0 && nanos > 0) {
+      ++totalSeconds;
+      nanos -= DateTimeUtils.NANOS_PER_SEC;
+    }
+  }
+
+  protected void set(int days, int hours, int minutes, int seconds, int nanos) {
+    long totalSeconds = seconds;
+    totalSeconds += TimeUnit.DAYS.toSeconds(days);
+    totalSeconds += TimeUnit.HOURS.toSeconds(hours);
+    totalSeconds += TimeUnit.MINUTES.toSeconds(minutes);
+    totalSeconds += TimeUnit.NANOSECONDS.toSeconds(nanos);
+    nanos = nanos % DateTimeUtils.NANOS_PER_SEC;
+
+    this.totalSeconds = totalSeconds;
+    this.nanos = nanos;
+
+    normalizeSecondsAndNanos();
+  }
+
+  protected void set(long seconds, int nanos) {
+    this.totalSeconds = seconds;
+    this.nanos = nanos;
+    normalizeSecondsAndNanos();
+  }
+
+  protected void set(BigDecimal totalSecondsBd) {
+    long totalSeconds = totalSecondsBd.longValue();
+    BigDecimal fractionalSecs = totalSecondsBd.remainder(BigDecimal.ONE);
+    int nanos = fractionalSecs.multiply(DateTimeUtils.NANOS_PER_SEC_BD).intValue();
+    set(totalSeconds, nanos);
+  }
+
+  public HiveIntervalDayTime negate() {
+    return new HiveIntervalDayTime(-getTotalSeconds(), -getNanos());
+  }
+
+  @Override
+  public int compareTo(HiveIntervalDayTime other) {
+    long cmp = this.totalSeconds - other.totalSeconds;
+    if (cmp == 0) {
+      cmp = this.nanos - other.nanos;
+    }
+    if (cmp != 0) {
+      cmp = cmp > 0 ? 1 : -1;
+    }
+    return (int) cmp;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (!(obj instanceof HiveIntervalDayTime)) {
+      return false;
+    }
+    return 0 == compareTo((HiveIntervalDayTime) obj);
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder().append(totalSeconds).append(nanos).toHashCode();
+  }
+
+  @Override
+  public String toString() {
+    // If normalize() was used, then day-hour-minute-second-nanos should have the same sign.
+    // This is currently working with that assumption.
+    boolean isNegative = (totalSeconds < 0 || nanos < 0);
+    String daySecondSignStr = isNegative ? "-" : "";
+
+    return String.format("%s%d %02d:%02d:%02d.%09d",
+        daySecondSignStr, Math.abs(getDays()),
+        Math.abs(getHours()), Math.abs(getMinutes()),
+        Math.abs(getSeconds()), Math.abs(getNanos()));
+  }
+
+  public static HiveIntervalDayTime valueOf(String strVal) {
+    HiveIntervalDayTime result = null;
+    if (strVal == null) {
+      throw new IllegalArgumentException("Interval day-time string was null");
+    }
+    Matcher patternMatcher = PATTERN_MATCHER.get();
+    patternMatcher.reset(strVal);
+    if (patternMatcher.matches()) {
+      // Parse out the individual parts
+      try {
+        // Sign - whether interval is positive or negative
+        int sign = 1;
+        String field = patternMatcher.group(1);
+        if (field != null && field.equals("-")) {
+          sign = -1;
+        }
+        int days = sign *
+            DateTimeUtils.parseNumericValueWithRange("day", patternMatcher.group(2),
+                0, Integer.MAX_VALUE);
+        byte hours = (byte) (sign *
+            DateTimeUtils.parseNumericValueWithRange("hour", patternMatcher.group(3), 0, 23));
+        byte minutes = (byte) (sign *
+            DateTimeUtils.parseNumericValueWithRange("minute", patternMatcher.group(4), 0, 59));
+        int seconds = 0;
+        int nanos = 0;
+        field = patternMatcher.group(5);
+        if (field != null) {
+          BigDecimal bdSeconds = new BigDecimal(field);
+          if (bdSeconds.compareTo(DateTimeUtils.MAX_INT_BD) > 0) {
+            throw new IllegalArgumentException("seconds value of " + bdSeconds + " too large");
+          }
+          seconds = sign * bdSeconds.intValue();
+          nanos = sign * bdSeconds.subtract(new BigDecimal(bdSeconds.toBigInteger()))
+              .multiply(DateTimeUtils.NANOS_PER_SEC_BD).intValue();
+        }
+
+        result = new HiveIntervalDayTime(days, hours, minutes, seconds, nanos);
+      } catch (Exception err) {
+        throw new IllegalArgumentException("Error parsing interval day-time string: " + strVal, err);
+      }
+    } else {
+      throw new IllegalArgumentException(
+          "Interval string does not match day-time format of 'd h:m:s.n': " + strVal);
+    }
+
+    return result;
+  }
+
+  // Simple pattern: D H:M:S.nnnnnnnnn
+  private final static String PARSE_PATTERN =
+      "([+|-])?(\\d+) (\\d+):(\\d+):((\\d+)(\\.(\\d+))?)";
+
+  private static final ThreadLocal<Matcher> PATTERN_MATCHER = new ThreadLocal<Matcher>() {
+    @Override
+    protected Matcher initialValue() {
+      return Pattern.compile(PARSE_PATTERN).matcher("");
+    }
+  };
+}
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java b/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java
new file mode 100644
index 0000000..457269a
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import org.apache.hive.common.util.DateTimeUtils;
+
+public class HiveIntervalYearMonth implements Comparable<HiveIntervalYearMonth> {
+
+  // years/months represented in months
+  protected int totalMonths;
+
+  protected final static int MONTHS_PER_YEAR = 12;
+
+  public HiveIntervalYearMonth() {
+  }
+
+  public HiveIntervalYearMonth(int years, int months) {
+    set(years, months);
+  }
+
+  public HiveIntervalYearMonth(int totalMonths) {
+    set(totalMonths);
+  }
+
+  public HiveIntervalYearMonth(HiveIntervalYearMonth hiveInterval) {
+    set(hiveInterval.getTotalMonths());
+  }
+
+  //
+  // Getters
+  //
+
+  public int getYears() {
+    return totalMonths / MONTHS_PER_YEAR;
+  }
+
+  public int getMonths() {
+    return totalMonths % MONTHS_PER_YEAR;
+  }
+
+  public int getTotalMonths() {
+    return totalMonths;
+  }
+
+  protected void set(int years, int months) {
+    this.totalMonths = months;
+    this.totalMonths += years * MONTHS_PER_YEAR;
+  }
+
+  protected void set(int totalMonths) {
+    this.totalMonths = totalMonths;
+  }
+
+  public HiveIntervalYearMonth negate() {
+    return new HiveIntervalYearMonth(-getTotalMonths());
+  }
+
+  //
+  // Comparison
+  //
+
+  @Override
+  public int compareTo(HiveIntervalYearMonth other) {
+    int cmp = this.getTotalMonths() - other.getTotalMonths();
+
+    if (cmp != 0) {
+      cmp = cmp > 0 ? 1 : -1;
+    }
+    return cmp;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (!(obj instanceof HiveIntervalYearMonth)) {
+      return false;
+    }
+    return 0 == compareTo((HiveIntervalYearMonth) obj);
+  }
+
+  @Override
+  public int hashCode() {
+    return totalMonths;
+  }
+
+  @Override
+  public String toString() {
+    String yearMonthSignStr = totalMonths >= 0 ? "" : "-";
+
+    return String.format("%s%d-%d",
+        yearMonthSignStr, Math.abs(getYears()), Math.abs(getMonths()));
+  }
+
+  public static HiveIntervalYearMonth valueOf(String strVal) {
+    HiveIntervalYearMonth result = null;
+    if (strVal == null) {
+      throw new IllegalArgumentException("Interval year-month string was null");
+    }
+    Matcher patternMatcher = PATTERN_MATCHER.get();
+    patternMatcher.reset(strVal);
+    if (patternMatcher.matches()) {
+      // Parse out the individual parts
+      try {
+        // Sign - whether interval is positive or negative
+        int sign = 1;
+        String field = patternMatcher.group(1);
+        if (field != null && field.equals("-")) {
+          sign = -1;
+        }
+        int years = sign *
+            DateTimeUtils.parseNumericValueWithRange("year", patternMatcher.group(2),
+                0, Integer.MAX_VALUE);
+        byte months = (byte) (sign *
+            DateTimeUtils.parseNumericValueWithRange("month", patternMatcher.group(3), 0, 11));
+        result = new HiveIntervalYearMonth(years, months);
+      } catch (Exception err) {
+        throw new IllegalArgumentException("Error parsing interval year-month string: " + strVal, err);
+      }
+    } else {
+      throw new IllegalArgumentException(
+          "Interval string does not match year-month format of 'y-m': " + strVal);
+    }
+
+    return result;
+  }
+
+  // Simple pattern: Y-M
+  private final static String PARSE_PATTERN =
+      "([+|-])?(\\d+)-(\\d+)";
+
+  private static final ThreadLocal<Matcher> PATTERN_MATCHER = new ThreadLocal<Matcher>() {
+    @Override
+    protected Matcher initialValue() {
+      return Pattern.compile(PARSE_PATTERN).matcher("");
+    }
+  };
+}
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/MutableHiveIntervalDayTime.java b/common/src/java/org/apache/hadoop/hive/common/type/MutableHiveIntervalDayTime.java
new file mode 100644
index 0000000..52b22fe
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/MutableHiveIntervalDayTime.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.type;
+
+public class MutableHiveIntervalDayTime extends HiveIntervalDayTime {
+  public void setValue(int days, int hours, int minutes, int seconds, int nanos) {
+    set(days, hours, minutes, seconds, nanos);
+  }
+
+  public void setValue(HiveIntervalDayTime other) {
+    set(other.getDays(), other.getHours(), other.getMinutes(),
+        other.getSeconds(), other.getNanos());
+  }
+
+  public void setValue(long totalSeconds, int nanos) {
+    set(totalSeconds, nanos);
+  }
+}
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/MutableHiveIntervalYearMonth.java b/common/src/java/org/apache/hadoop/hive/common/type/MutableHiveIntervalYearMonth.java
new file mode 100644
index 0000000..bf3e769
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/MutableHiveIntervalYearMonth.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common.type;
+
+public class MutableHiveIntervalYearMonth extends HiveIntervalYearMonth {
+  public void setValue(int years, int months) {
+    set(years, months);
+  }
+
+  public void setValue(HiveIntervalYearMonth other) {
+    set(other.getYears(), other.getMonths());
+  }
+
+  public void setValue(int totalMonths) {
+    set(totalMonths);
+  }
+}
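The mutable subclasses only widen the visibility of the protected set(...) methods. A sketch of the likely reuse pattern (an assumption; the patch itself shows no caller): one scratch object repopulated per row instead of allocating a new immutable interval each time:

    MutableHiveIntervalDayTime scratch = new MutableHiveIntervalDayTime();
    scratch.setValue(3, 4, 5, 6, 7); // reuse the same object row after row
    scratch.setValue(86400L, 0);     // now exactly 1 day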
diff --git a/common/src/java/org/apache/hive/common/util/DateTimeUtils.java b/common/src/java/org/apache/hive/common/util/DateTimeUtils.java
new file mode 100644
index 0000000..423aefe
--- /dev/null
+++ b/common/src/java/org/apache/hive/common/util/DateTimeUtils.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common.util;
+
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.LocalDate;
+
+public class DateTimeUtils {
+
+  public static final int NANOS_PER_SEC = 1000000000;
+
+  public static final BigDecimal MAX_INT_BD = new BigDecimal(Integer.MAX_VALUE);
+  public static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(NANOS_PER_SEC);
+
+  private static class NanosResult {
+    public int seconds;
+    public int nanos;
+  }
+
+  private static void addNanos(int leftNanos, int rightNanos, NanosResult result) {
+    // This is assuming left/right side are each < 1000000000 nanos
+    result.seconds = 0;
+    result.nanos = leftNanos + rightNanos;
+    if (result.nanos < 0) {
+      result.seconds = -1;
+      result.nanos += NANOS_PER_SEC;
+    } else if (result.nanos >= NANOS_PER_SEC) {
+      result.seconds = 1;
+      result.nanos -= NANOS_PER_SEC;
+    }
+  }
+
+  //
+  // Operations involving/returning year-month intervals
+  //
+
+  public static Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    // Attempt to match Oracle semantics for timestamp arithmetic,
+    // where timestamp arithmetic is done in UTC, then converted back to local timezone
+    DateTime dtUtc = new DateTime(ts.getTime()).toDateTime(DateTimeZone.UTC);
+    DateTime dtResult = dtUtc.plusMonths(interval.getTotalMonths()).toDateTime(DateTimeZone.getDefault());
+
+    Timestamp tsResult = new Timestamp(dtResult.getMillis());
+    tsResult.setNanos(ts.getNanos());
+
+    return tsResult;
+  }
+
+  public static Date add(Date dt, HiveIntervalYearMonth interval) {
+    if (dt == null || interval == null) {
+      return null;
+    }
+
+    LocalDate localDateResult = new LocalDate(dt.getTime()).plusMonths(interval.getTotalMonths());
+    return new Date(localDateResult.toDateTimeAtStartOfDay().getMillis());
+  }
+
+  public static HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
+    HiveIntervalYearMonth result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    result = new HiveIntervalYearMonth(left.getTotalMonths() + right.getTotalMonths());
+    return result;
+  }
+
+  public static Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public static Date subtract(Date left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public static HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  //
+  // Operations involving/returning day-time intervals
+  //
+
+  public static Timestamp add(Timestamp ts, HiveIntervalDayTime interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    NanosResult nanosResult = new NanosResult();
+    addNanos(ts.getNanos(), interval.getNanos(), nanosResult);
+
+    long newMillis = ts.getTime()
+        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+    Timestamp tsResult = new Timestamp(newMillis);
+    tsResult.setNanos(nanosResult.nanos);
+    return tsResult;
+  }
+
+  public static HiveIntervalDayTime add(HiveIntervalDayTime left, HiveIntervalDayTime right) {
+    HiveIntervalDayTime result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    NanosResult nanosResult = new NanosResult();
+    addNanos(left.getNanos(), right.getNanos(), nanosResult);
+
+    long totalSeconds = left.getTotalSeconds() + right.getTotalSeconds() + nanosResult.seconds;
+    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
+    return result;
+  }
+
+  public static Timestamp subtract(Timestamp left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public static HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public static HiveIntervalDayTime subtract(Timestamp left, Timestamp right) {
+    HiveIntervalDayTime result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    NanosResult nanosResult = new NanosResult();
+    addNanos(left.getNanos(), -(right.getNanos()), nanosResult);
+
+    long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
+        - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
+    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
+    return result;
+  }
+
+  public static int parseNumericValueWithRange(String fieldName,
+      String strVal, int minValue, int maxValue) throws IllegalArgumentException {
+    int result = 0;
+    if (strVal != null) {
+      result = Integer.parseInt(strVal);
+      if (result < minValue || result > maxValue) {
+        throw new IllegalArgumentException(String.format("%s value %d outside range [%d, %d]",
+            fieldName, result, minValue, maxValue));
+      }
+    }
+    return result;
+  }
+}
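A quick illustration of the arithmetic semantics implemented above (a sketch, not part of the patch; it assumes the default timezone is America/Los_Angeles, as in the tests that follow):

    // Month arithmetic is done in UTC, then rendered in local time, so adding
    // six months across a DST change shifts the local wall-clock hour.
    Timestamp ts = Timestamp.valueOf("2001-01-01 01:02:03");
    HiveIntervalYearMonth sixMonths = HiveIntervalYearMonth.valueOf("0-6");
    System.out.println(DateTimeUtils.add(ts, sixMonths)); // 2001-07-01 02:02:03.0 in LA

    // Day-time arithmetic works on epoch millis plus the carried nanos.
    HiveIntervalDayTime dt = HiveIntervalDayTime.valueOf("1 1:1:1.555555555");
    System.out.println(DateTimeUtils.add(ts, dt));        // 2001-01-02 02:03:04.555555555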
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java
new file mode 100644
index 0000000..a6d0903
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.*;
+
+import static org.junit.Assert.*;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
+public class TestHiveIntervalDayTime {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testGetters() throws Exception {
+    HiveIntervalDayTime i1 = new HiveIntervalDayTime(3, 4, 5, 6, 7);
+
+    assertEquals(3, i1.getDays());
+    assertEquals(4, i1.getHours());
+    assertEquals(5, i1.getMinutes());
+    assertEquals(6, i1.getSeconds());
+    assertEquals(7, i1.getNanos());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testCompare() throws Exception {
+    HiveIntervalDayTime i1 = new HiveIntervalDayTime(3, 4, 5, 6, 7);
+    HiveIntervalDayTime i2 = new HiveIntervalDayTime(3, 4, 5, 6, 7);
+    HiveIntervalDayTime i3 = new HiveIntervalDayTime(3, 4, 8, 9, 10);
+    HiveIntervalDayTime i4 = new HiveIntervalDayTime(3, 4, 8, 9, 5);
+
+    // compareTo()
+    assertEquals(i1 + " compareTo " + i1, 0, i1.compareTo(i1));
+    assertEquals(i1 + " compareTo " + i2, 0, i1.compareTo(i2));
+    assertEquals(i2 + " compareTo " + i1, 0, i2.compareTo(i1));
+    assertEquals(i3 + " compareTo " + i3, 0, i3.compareTo(i3));
+
+    assertTrue(i1 + " compareTo " + i3, 0 > i1.compareTo(i3));
+    assertTrue(i3 + " compareTo " + i1, 0 < i3.compareTo(i1));
+
+    // equals()
+    assertTrue(i1 + " equals " + i1, i1.equals(i1));
+    assertTrue(i1 + " equals " + i2, i1.equals(i2));
+    assertFalse(i1 + " equals " + i3, i1.equals(i3));
+    assertFalse(i3 + " equals " + i1, i3.equals(i1));
+    assertFalse(i3 + " equals " + i4, i3.equals(i4));
+
+    // hashCode()
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i1.hashCode());
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i2.hashCode());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testValueOf() throws Exception {
+    HiveIntervalDayTime i1 = HiveIntervalDayTime.valueOf("3 04:05:06.123456");
+    assertEquals(3, i1.getDays());
+    assertEquals(4, i1.getHours());
+    assertEquals(5, i1.getMinutes());
+    assertEquals(6, i1.getSeconds());
+    assertEquals(123456000, i1.getNanos());
+
+    HiveIntervalDayTime i2 = HiveIntervalDayTime.valueOf("+3 04:05:06");
+    assertEquals(3, i2.getDays());
+    assertEquals(4, i2.getHours());
+    assertEquals(5, i2.getMinutes());
+    assertEquals(6, i2.getSeconds());
+    assertEquals(0, i2.getNanos());
+
+    HiveIntervalDayTime i3 = HiveIntervalDayTime.valueOf("-12 13:14:15.987654321");
+    assertEquals(-12, i3.getDays());
+    assertEquals(-13, i3.getHours());
+    assertEquals(-14, i3.getMinutes());
+    assertEquals(-15, i3.getSeconds());
+    assertEquals(-987654321, i3.getNanos());
+
+    HiveIntervalDayTime i4 = HiveIntervalDayTime.valueOf("-0 0:0:0.000000012");
+    assertEquals(0, i4.getDays());
+    assertEquals(0, i4.getHours());
+    assertEquals(0, i4.getMinutes());
+    assertEquals(0, i4.getSeconds());
+    assertEquals(-12, i4.getNanos());
+
+    // Invalid values
+    String[] invalidValues = {
+        null,
+        "abc",
+        "0-11",
+        "0 60:0:0",
+        "0 0:60:0"
+    };
+    for (String invalidValue : invalidValues) {
+      boolean caughtException = false;
+      try {
+        HiveIntervalDayTime.valueOf(invalidValue);
+        fail("Expected exception");
+      } catch (IllegalArgumentException err) {
+        caughtException = true;
+      }
+      assertTrue("Expected exception", caughtException);
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testToString() throws Exception {
+    assertEquals("0 00:00:00.000000000", HiveIntervalDayTime.valueOf("0 00:00:00").toString());
+    assertEquals("3 04:05:06.123456000", HiveIntervalDayTime.valueOf("3 04:05:06.123456").toString());
+    assertEquals("-3 04:05:06.123456000", HiveIntervalDayTime.valueOf("-3 04:05:06.123456").toString());
+    assertEquals("1 00:00:00.000000000", HiveIntervalDayTime.valueOf("1 00:00:00").toString());
+    assertEquals("-1 00:00:00.000000000", HiveIntervalDayTime.valueOf("-1 00:00:00").toString());
+    assertEquals("0 00:00:00.880000000", HiveIntervalDayTime.valueOf("0 00:00:00.88").toString());
+    assertEquals("-0 00:00:00.880000000", HiveIntervalDayTime.valueOf("-0 00:00:00.88").toString());
+
+    // Mixed sign cases
+    assertEquals("-3 04:05:06.000000007",
+        new HiveIntervalDayTime(-3, -4, -5, -6, -7).toString());
+    assertEquals("3 04:05:06.000000007",
+        new HiveIntervalDayTime(3, 4, 5, 6, 7).toString());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testNormalize() throws Exception {
+    HiveIntervalDayTime i1 = new HiveIntervalDayTime(50, 48, 3, 5400, 2000000123);
+    assertEquals(HiveIntervalDayTime.valueOf("52 1:33:2.000000123"), i1);
+    assertEquals(52, i1.getDays());
+    assertEquals(1, i1.getHours());
+    assertEquals(33, i1.getMinutes());
+    assertEquals(2, i1.getSeconds());
+    assertEquals(123, i1.getNanos());
+
+    assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),
+        new HiveIntervalDayTime(0, 0, 0, 0, 0));
+    assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),
+        new HiveIntervalDayTime(2, -48, 0, 1, -1000000000));
+    assertEquals(HiveIntervalDayTime.valueOf("0 0:0:0"),
+        new HiveIntervalDayTime(-2, 48, 0, -1, 1000000000));
+    assertEquals(HiveIntervalDayTime.valueOf("1 0:0:0"),
+        new HiveIntervalDayTime(-1, 48, 0, 0, 0));
+    assertEquals(HiveIntervalDayTime.valueOf("-1 0:0:0"),
+        new HiveIntervalDayTime(1, -48, 0, 0, 0));
+    assertEquals(HiveIntervalDayTime.valueOf("0 23:59:59.999999999"),
+        new HiveIntervalDayTime(1, 0, 0, 0, -1));
+    assertEquals(HiveIntervalDayTime.valueOf("-0 23:59:59.999999999"),
+        new HiveIntervalDayTime(-1, 0, 0, 0, 1));
+
+    // -1 day 10 hrs 11 mins 172800 secs = -1 day 10 hrs 11 mins + 2 days = 1 day 10 hrs 11 mins
+    assertEquals(HiveIntervalDayTime.valueOf("1 10:11:0"),
+        new HiveIntervalDayTime(-1, 10, 11, 172800, 0));
+
+    i1 = new HiveIntervalDayTime(480, 480, 0, 5400, 2000000123);
+    assertEquals(500, i1.getDays());
+    assertEquals(1, i1.getHours());
+    assertEquals(30, i1.getMinutes());
+    assertEquals(2, i1.getSeconds());
+    assertEquals(123, i1.getNanos());
+  }
+}
diff --git a/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java
new file mode 100644
index 0000000..320835e
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import org.junit.*;
+import static org.junit.Assert.*;
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
+public class TestHiveIntervalYearMonth {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testGetters() throws Exception {
+    HiveIntervalYearMonth i1 = new HiveIntervalYearMonth(1, 2);
+    assertEquals(1, i1.getYears());
+    assertEquals(2, i1.getMonths());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testCompare() throws Exception {
+    HiveIntervalYearMonth i1 = new HiveIntervalYearMonth(1, 2);
+    HiveIntervalYearMonth i2 = new HiveIntervalYearMonth(1, 2);
+    HiveIntervalYearMonth i3 = new HiveIntervalYearMonth(1, 3);
+
+    // compareTo()
+    assertEquals(i1 + " compareTo " + i1, 0, i1.compareTo(i1));
+    assertEquals(i1 + " compareTo " + i2, 0, i1.compareTo(i2));
+    assertEquals(i2 + " compareTo " + i1, 0, i2.compareTo(i1));
+    assertEquals(i3 + " compareTo " + i3, 0, i3.compareTo(i3));
+
+    assertTrue(i1 + " compareTo " + i3, 0 > i1.compareTo(i3));
+    assertTrue(i3 + " compareTo " + i1, 0 < i3.compareTo(i1));
+
+    // equals()
+    assertTrue(i1 + " equals " + i1, i1.equals(i1));
+    assertTrue(i1 + " equals " + i2, i1.equals(i2));
+    assertFalse(i1 + " equals " + i3, i1.equals(i3));
+    assertFalse(i3 + " equals " + i1, i3.equals(i1));
+
+    // hashCode()
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i1.hashCode());
+    assertEquals(i1 + " hashCode " + i1, i1.hashCode(), i2.hashCode());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testValueOf() throws Exception {
+    HiveIntervalYearMonth i1 = HiveIntervalYearMonth.valueOf("1-2");
+    assertEquals(1, i1.getYears());
+    assertEquals(2, i1.getMonths());
+
+    HiveIntervalYearMonth i2 = HiveIntervalYearMonth.valueOf("+8-9");
+    assertEquals(8, i2.getYears());
+    assertEquals(9, i2.getMonths());
+
+    HiveIntervalYearMonth i3 = HiveIntervalYearMonth.valueOf("-10-11");
+    assertEquals(-10, i3.getYears());
+    assertEquals(-11, i3.getMonths());
+
+    HiveIntervalYearMonth i4 = HiveIntervalYearMonth.valueOf("-0-0");
+    assertEquals(0, i4.getYears());
+    assertEquals(0, i4.getMonths());
+
+    // Invalid values
+    String[] invalidValues = {
+        null,
+        "abc",
+        "0-12",
+        "0 1:2:3"
+    };
+    for (String invalidValue : invalidValues) {
+      boolean caughtException = false;
+      try {
+        HiveIntervalYearMonth.valueOf(invalidValue);
+        fail("Expected exception");
+      } catch (IllegalArgumentException err) {
+        caughtException = true;
+      }
+      assertTrue("Expected exception", caughtException);
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testToString() throws Exception {
+    assertEquals("0-0", HiveIntervalYearMonth.valueOf("0-0").toString());
+    assertEquals("1-2", HiveIntervalYearMonth.valueOf("1-2").toString());
+    assertEquals("-1-2", HiveIntervalYearMonth.valueOf("-1-2").toString());
+    assertEquals("1-0", HiveIntervalYearMonth.valueOf("1-0").toString());
+    assertEquals("-1-0", HiveIntervalYearMonth.valueOf("-1-0").toString());
+    assertEquals("0-0", HiveIntervalYearMonth.valueOf("-0-0").toString());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testNormalize() throws Exception {
+    HiveIntervalYearMonth i1 = new HiveIntervalYearMonth(1, -6);
+    assertEquals(HiveIntervalYearMonth.valueOf("0-6"), i1);
+    assertEquals(0, i1.getYears());
+    assertEquals(6, i1.getMonths());
+
+    assertEquals(HiveIntervalYearMonth.valueOf("0-0"), new HiveIntervalYearMonth(0, 0));
+    assertEquals(HiveIntervalYearMonth.valueOf("0-0"), new HiveIntervalYearMonth(-1, 12));
+    assertEquals(HiveIntervalYearMonth.valueOf("0-4"), new HiveIntervalYearMonth(-1, 16));
+    assertEquals(HiveIntervalYearMonth.valueOf("0-11"), new HiveIntervalYearMonth(1, -1));
+    assertEquals(HiveIntervalYearMonth.valueOf("-0-11"), new HiveIntervalYearMonth(-1, 1));
+
+    // -5 years + 121 months = -5 years + 10 years 1 month = 5 years 1 month
+    assertEquals(HiveIntervalYearMonth.valueOf("5-1"), new HiveIntervalYearMonth(-5, 121));
+  }
+}
diff --git a/common/src/test/org/apache/hive/common/util/TestDateTimeUtils.java b/common/src/test/org/apache/hive/common/util/TestDateTimeUtils.java
new file mode 100644
index 0000000..e455547
--- /dev/null
+++ b/common/src/test/org/apache/hive/common/util/TestDateTimeUtils.java
@@ -0,0 +1,457 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.common.util;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.TimeZone;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.junit.*;
+
+import static org.junit.Assert.*;
+
+public class TestDateTimeUtils {
+
+  @Test
+  public void testTimestampIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-0",
+        "2001-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "1-1",
+        "2002-02-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "10-0",
+        "2011-01-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "0-11",
+        "2001-12-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-03-01 01:02:03.500", plus, "1-11",
+        "2003-02-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-1-1",
+        "1999-12-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-0-0",
+        "2001-01-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", plus, "-0-0",
+        "2001-01-01 01:02:03.123456789");
+
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "0-0",
+        "2001-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "10-0",
+        "1991-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-10-0",
+        "2011-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "8-2",
+        "1992-11-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-8-2",
+        "2009-03-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", minus, "8-2",
+        "1992-11-01 01:02:03.123456789");
+
+    checkTimestampIntervalYearMonthArithmetic(null, plus, "1-1",
+        null);
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, null,
+        null);
+    checkTimestampIntervalYearMonthArithmetic(null, minus, "1-1",
+        null);
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, null,
+        null);
+
+    // End of the month behavior
+    checkTimestampIntervalYearMonthArithmetic("2001-01-28 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-29 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-30 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-31 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-02-28 01:02:03", plus, "0-1",
+        "2001-03-28 01:02:03");
+
+    // Test that timestamp arithmetic is done in UTC and then converted back to local timezone,
+    // matching Oracle behavior.
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
+          "2001-07-01 02:02:03");
+      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
+          "2002-01-01 00:02:03");
+
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
+          "2001-07-01 01:02:03");
+      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
+          "2002-01-01 01:02:03");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  @Test
+  public void testDateIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "0-0", "2001-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "0-1", "2001-02-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "0-6", "2001-07-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "1-0", "2002-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "1-1", "2002-02-01");
+    checkDateIntervalYearMonthArithmetic("2001-10-10", plus, "1-6", "2003-04-10");
+    checkDateIntervalYearMonthArithmetic("2003-04-10", plus, "-1-6", "2001-10-10");
+
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "0-0", "2001-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "0-1", "2000-12-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "1-0", "2000-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "1-1", "1999-12-01");
+    checkDateIntervalYearMonthArithmetic("2001-10-10", minus, "1-6", "2000-04-10");
+    checkDateIntervalYearMonthArithmetic("2003-04-10", minus, "-1-6", "2004-10-10");
+
+    // end of month behavior
+    checkDateIntervalYearMonthArithmetic("2001-01-28", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-29", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-30", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-31", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-31", plus, "0-2", "2001-03-31");
+    checkDateIntervalYearMonthArithmetic("2001-02-28", plus, "0-1", "2001-03-28");
+    // leap year
+    checkDateIntervalYearMonthArithmetic("2004-01-28", plus, "0-1", "2004-02-28");
+    checkDateIntervalYearMonthArithmetic("2004-01-29", plus, "0-1", "2004-02-29");
+    checkDateIntervalYearMonthArithmetic("2004-01-30", plus, "0-1", "2004-02-29");
+    checkDateIntervalYearMonthArithmetic("2004-01-31", plus, "0-1", "2004-02-29");
+  }
+
+  @Test
+  public void testIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkIntervalYearMonthArithmetic("0-0", plus, "0-0", "0-0");
+    checkIntervalYearMonthArithmetic("0-0", plus, "4-5", "4-5");
+    checkIntervalYearMonthArithmetic("4-5", plus, "0-0", "4-5");
+    checkIntervalYearMonthArithmetic("0-0", plus, "1-1", "1-1");
+    checkIntervalYearMonthArithmetic("1-1", plus, "0-0", "1-1");
+
+    checkIntervalYearMonthArithmetic("0-0", minus, "0-0", "0-0");
+    checkIntervalYearMonthArithmetic("0-0", minus, "1-0", "-1-0");
+    checkIntervalYearMonthArithmetic("1-2", minus, "1-1", "0-1");
+    checkIntervalYearMonthArithmetic("0-0", minus, "1-1", "-1-1");
+    checkIntervalYearMonthArithmetic("-1-1", minus, "1-1", "-2-2");
+    checkIntervalYearMonthArithmetic("-1-1", minus, "-1-1", "0-0");
+
+    checkIntervalYearMonthArithmetic(null, plus, "1-1", null);
+    checkIntervalYearMonthArithmetic("1-1", plus, null, null);
+    checkIntervalYearMonthArithmetic(null, minus, "1-1", null);
+    checkIntervalYearMonthArithmetic("1-1", minus, null, null);
+  }
+
+  @Test
+  public void testTimestampIntervalDayTimeArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1",
+        "2001-01-02 02:03:04");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1",
+        "2001-01-02 02:03:04.456");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555",
+        "2001-01-02 02:03:05.011");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1.555555555",
+        "2001-01-02 02:03:04.555555555");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555555555",
+        "2001-01-02 02:03:05.011555555");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.499",
+        "2001-01-02 02:03:04.999");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.500",
+        "2001-01-02 02:03:05.0");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.501",
+        "2001-01-02 02:03:05.001");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.4999999999",
+        "2001-01-02 02:03:04.999999999");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500",
+        "2001-01-02 02:03:05.0");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500000001",
+        "2001-01-02 02:03:05.000000001");
+
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 01:02:03",
+        "2001-01-01 00:00:00");
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 0:0:0",
+        "2001-01-01 01:02:03");
+
+    checkTsIntervalDayTimeArithmetic(null, plus, "1 1:1:1.555555555",
+        null);
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, null,
+        null);
+    checkTsIntervalDayTimeArithmetic(null, minus, "1 1:1:1.555555555",
+        null);
+    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, null,
+        null);
+
+    // Try some time zone boundaries
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      // America/Los_Angeles DST dates - 2015-03-08 02:00:00/2015-11-01 02:00:00
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
+          "2015-03-08 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
+          "2015-03-08 03:00:00");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:01",
+          "2015-03-08 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.005",
+          "2015-03-08 03:00:00");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.0051",
+          "2015-03-08 03:00:00.0001");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:0.005",
+          "2015-03-08 01:59:59.995");
+      checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:58", plus, "0 0:0:01",
+          "2015-11-01 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:59", plus, "0 0:0:01",
+          "2015-11-01 02:00:00");
+
+      // UTC has no such adjustment
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
+          "2015-03-08 01:59:59");
+      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
+          "2015-03-08 02:00:00");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  @Test
+  public void testIntervalDayTimeArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkIntervalDayTimeArithmetic("0 0:0:0", plus, "0 0:0:0", "0 0:0:0");
+    checkIntervalDayTimeArithmetic("0 01:02:03", plus, "6 0:0:0.0001", "6 01:02:03.0001");
+    checkIntervalDayTimeArithmetic("6 0:0:0.0001", plus, "0 01:02:03", "6 01:02:03.0001");
+    checkIntervalDayTimeArithmetic("0 01:02:03", plus, "1 10:10:10.0001", "1 11:12:13.0001");
+    checkIntervalDayTimeArithmetic("1 10:10:10.0001", plus, "0 01:02:03", "1 11:12:13.0001");
+    checkIntervalDayTimeArithmetic("0 0:0:0.900000000", plus, "0 0:0:0.099999999", "0 0:0:0.999999999");
+    checkIntervalDayTimeArithmetic("0 0:0:0.900000001", plus, "0 0:0:0.099999999", "0 0:0:1");
+    checkIntervalDayTimeArithmetic("0 0:0:0.900000002", plus, "0 0:0:0.099999999", "0 0:0:1.000000001");
+
+    checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0", "0 0:0:0");
+    checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0.123", "-0 0:0:0.123");
+    checkIntervalDayTimeArithmetic("3 4:5:6.789", minus, "1 1:1:1.111", "2 3:4:5.678");
+    checkIntervalDayTimeArithmetic("0 0:0:0.0", minus, "1 1:1:1.111", "-1 1:1:1.111");
+    checkIntervalDayTimeArithmetic("-1 1:1:1.222", minus, "1 1:1:1.111", "-2 2:2:2.333");
+    checkIntervalDayTimeArithmetic("-1 1:1:1.111", minus, "-1 1:1:1.111", "0 0:0:0");
+
+    checkIntervalDayTimeArithmetic(null, plus, "1 1:1:1.111", null);
+    checkIntervalDayTimeArithmetic("1 1:1:1.111", plus, null, null);
+    checkIntervalDayTimeArithmetic(null, minus, "1 1:1:1.111", null);
+    checkIntervalDayTimeArithmetic("1 1:1:1.111", minus, null, null);
+  }
+
+  @Test
+  public void testTimestampSubtraction() throws Exception {
+    checkTsArithmetic("2001-01-01 00:00:00", "2001-01-01 00:00:00", "0 0:0:0");
+    checkTsArithmetic("2002-02-02 01:01:01", "2001-01-01 00:00:00", "397 1:1:1");
+    checkTsArithmetic("2001-01-01 00:00:00", "2002-02-02 01:01:01", "-397 1:1:1");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 00:00:00", "1 0:0:0");
+    checkTsArithmetic("2014-12-31 00:00:00", "2015-01-01 00:00:00", "-1 0:0:0");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59", "0 0:0:01");
+    checkTsArithmetic("2014-12-31 23:59:59", "2015-01-01 00:00:00", "-0 0:0:01");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59.9999", "0 0:0:00.0001");
+    checkTsArithmetic("2014-12-31 23:59:59.9999", "2015-01-01 00:00:00", "-0 0:0:00.0001");
+    checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 11:12:13.000000001", "0 12:47:46.999999999");
+    checkTsArithmetic("2014-12-31 11:12:13.000000001", "2015-01-01 00:00:00", "-0 12:47:46.999999999");
+
+    // Test that timestamp arithmetic is done in UTC and then converted back to local timezone,
+    // matching Oracle behavior.
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+      checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 1:0:0");
+      checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 1:0:0");
+      checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 1:0:0");
+      checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 1:0:0");
+
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 0:0:0");
+      checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 0:0:0");
+      checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 0:0:0");
+      checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 0:0:0");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  private static void checkTimestampIntervalYearMonthArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    HiveIntervalYearMonth rightInterval = null;
+    if (right != null) {
+      rightInterval = HiveIntervalYearMonth.valueOf(right);
+    }
+    Timestamp expectedResult = null;
+    if (expected != null) {
+      expectedResult = Timestamp.valueOf(expected);
+    }
+    Timestamp testResult = null;
+
+    switch (operationType) {
+    case '-':
+      testResult = DateTimeUtils.subtract(leftTs, rightInterval);
+      break;
+    case '+':
+      testResult = DateTimeUtils.add(leftTs, rightInterval);
+      break;
+    default:
+      throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkDateIntervalYearMonthArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    Date leftDt = null;
+    if (left != null) {
+      leftDt = Date.valueOf(left);
+    }
+    HiveIntervalYearMonth rightInterval = null;
+    if (right != null) {
+      rightInterval = HiveIntervalYearMonth.valueOf(right);
+    }
+    Date expectedResult = null;
+    if (expected != null) {
+      expectedResult = Date.valueOf(expected);
+    }
+    Date testResult = null;
+
+    switch (operationType) {
+    case '-':
+      testResult = DateTimeUtils.subtract(leftDt, rightInterval);
+      break;
+    case '+':
+      testResult = DateTimeUtils.add(leftDt, rightInterval);
+      break;
+    default:
+      throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftDt, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkIntervalYearMonthArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    HiveIntervalYearMonth leftInterval = left == null ? null : HiveIntervalYearMonth.valueOf(left);
+    HiveIntervalYearMonth rightInterval = right == null ? null : HiveIntervalYearMonth.valueOf(right);
+    HiveIntervalYearMonth expectedResult = expected == null ? null : HiveIntervalYearMonth.valueOf(expected);
+    HiveIntervalYearMonth testResult = null;
+
+    switch (operationType) {
+    case '-':
+      testResult = DateTimeUtils.subtract(leftInterval, rightInterval);
+      break;
+    case '+':
+      testResult = DateTimeUtils.add(leftInterval, rightInterval);
+      break;
+    default:
+      throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkTsIntervalDayTimeArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right);
+    Timestamp expectedResult = null;
+    if (expected != null) {
+      expectedResult = Timestamp.valueOf(expected);
+    }
+    Timestamp testResult = null;
+
+    switch (operationType) {
+    case '-':
+      testResult = DateTimeUtils.subtract(leftTs, rightInterval);
+      break;
+    case '+':
+      testResult = DateTimeUtils.add(leftTs, rightInterval);
+      break;
+    default:
+      throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkIntervalDayTimeArithmetic(
+      String left, char operationType, String right, String expected) throws Exception {
+    HiveIntervalDayTime leftInterval = left == null ? null : HiveIntervalDayTime.valueOf(left);
+    HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right);
+    HiveIntervalDayTime expectedResult = expected == null ? null : HiveIntervalDayTime.valueOf(expected);
+    HiveIntervalDayTime testResult = null;
+
+    switch (operationType) {
+    case '-':
+      testResult = DateTimeUtils.subtract(leftInterval, rightInterval);
+      break;
+    case '+':
+      testResult = DateTimeUtils.add(leftInterval, rightInterval);
+      break;
+    default:
+      throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkTsArithmetic(
+      String left, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    Timestamp rightTs = null;
+    if (right != null) {
+      rightTs = Timestamp.valueOf(right);
+    }
+    HiveIntervalDayTime expectedResult = null;
+    if (expected != null) {
+      expectedResult = HiveIntervalDayTime.valueOf(expected);
+    }
+    HiveIntervalDayTime testResult =
+        DateTimeUtils.subtract(leftTs, rightTs);
+
+    assertEquals(String.format("%s - %s", leftTs, rightTs),
+        expectedResult, testResult);
+  }
+}
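One more illustration before the ql changes: the timestamp-difference semantics exercised in testTimestampSubtraction, written out directly (a sketch; it assumes the default timezone is America/Los_Angeles, as in the test):

    // The LA difference spans a DST change, so the elapsed time includes the extra hour.
    Timestamp a = Timestamp.valueOf("1999-12-15 00:00:00");
    Timestamp b = Timestamp.valueOf("1999-09-15 00:00:00");
    System.out.println(DateTimeUtils.subtract(a, b)); // prints "91 01:00:00.000000000"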
system.registerGenericUDF(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, GenericUDFToIntervalDayTime.class); system.registerGenericUDF(serdeConstants.BINARY_TYPE_NAME, GenericUDFToBinary.class); system.registerGenericUDF(serdeConstants.DECIMAL_TYPE_NAME, GenericUDFToDecimal.class); system.registerGenericUDF(serdeConstants.VARCHAR_TYPE_NAME, GenericUDFToVarchar.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java index 6a3c300..0738842 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java @@ -82,6 +82,8 @@ public static MapJoinKey read(Output output, MapJoinObjectSerDeContext context, SUPPORTED_PRIMITIVES.add(PrimitiveCategory.STRING); SUPPORTED_PRIMITIVES.add(PrimitiveCategory.DATE); SUPPORTED_PRIMITIVES.add(PrimitiveCategory.TIMESTAMP); + SUPPORTED_PRIMITIVES.add(PrimitiveCategory.INTERVAL_YEAR_MONTH); + SUPPORTED_PRIMITIVES.add(PrimitiveCategory.INTERVAL_DAY_TIME); SUPPORTED_PRIMITIVES.add(PrimitiveCategory.BINARY); SUPPORTED_PRIMITIVES.add(PrimitiveCategory.VARCHAR); SUPPORTED_PRIMITIVES.add(PrimitiveCategory.CHAR); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java index 4b1f5c1..45b0b7f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hive.ql.optimizer.calcite.translator; +import java.math.BigDecimal; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; @@ -26,11 +27,14 @@ import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.sql.type.SqlTypeName; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.ParseDriver; +import org.apache.hive.common.util.DateTimeUtils; class ASTBuilder { @@ -218,6 +222,23 @@ static ASTNode literal(RexLiteral literal, boolean useTypeQualInLiteral) { val = "'" + val + "'"; } break; + case INTERVAL_YEAR_MONTH: { + type = HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL; + BigDecimal monthsBd = (BigDecimal) literal.getValue(); + HiveIntervalYearMonth intervalYearMonth = new HiveIntervalYearMonth(monthsBd.intValue()); + val = "'" + intervalYearMonth.toString() + "'"; + break; + } + case INTERVAL_DAY_TIME: { + type = HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL; + BigDecimal millisBd = (BigDecimal) literal.getValue(); + + // Calcite literal is in millis, convert to seconds + BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000)); + HiveIntervalDayTime intervalDayTime = new HiveIntervalDayTime(secsBd); + val = "'" + intervalDayTime.toString() + "'"; + break; + } case NULL: type = HiveParser.TOK_NULL; break; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java index 
4dbac57..29134a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Map; +import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; @@ -37,14 +38,18 @@ import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; +import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlCastFunction; +import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.Decimal128; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; @@ -371,13 +376,15 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), calciteDataType); break; case CHAR: - if (value instanceof HiveChar) + if (value instanceof HiveChar) { value = ((HiveChar) value).getValue(); + } calciteLiteral = rexBuilder.makeLiteral((String) value); break; case VARCHAR: - if (value instanceof HiveVarchar) + if (value instanceof HiveVarchar) { value = ((HiveVarchar) value).getValue(); + } calciteLiteral = rexBuilder.makeLiteral((String) value); break; case STRING: @@ -398,6 +405,21 @@ protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticEx } calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED); break; + case INTERVAL_YEAR_MONTH: + // Calcite year-month literal value is months as BigDecimal + BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths()); + calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths, + new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1))); + break; + case INTERVAL_DAY_TIME: + // Calcite day-time interval is millis value as BigDecimal + // Seconds converted to millis + BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000); + // Nanos converted to millis + BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6); + calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd), + new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1,1))); + break; case VOID: calciteLiteral = cluster.getRexBuilder().makeLiteral(null, cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java index 88c989f..8c3587e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/TypeConverter.java @@ -22,11 +22,14 @@ import java.util.List; import java.util.Map; +import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; +import org.apache.calcite.sql.parser.SqlParserPos; +import org.apache.calcite.sql.SqlIntervalQualifier; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.RowSchema; @@ -65,6 +68,8 @@ b.put(SqlTypeName.DOUBLE.getName(), new HiveToken(HiveParser.TOK_DOUBLE, "TOK_DOUBLE")); b.put(SqlTypeName.DATE.getName(), new HiveToken(HiveParser.TOK_DATE, "TOK_DATE")); b.put(SqlTypeName.TIMESTAMP.getName(), new HiveToken(HiveParser.TOK_TIMESTAMP, "TOK_TIMESTAMP")); + b.put(SqlTypeName.INTERVAL_YEAR_MONTH.getName(), new HiveToken(HiveParser.TOK_INTERVAL_YEAR_MONTH, "TOK_INTERVAL_YEAR_MONTH")); + b.put(SqlTypeName.INTERVAL_DAY_TIME.getName(), new HiveToken(HiveParser.TOK_INTERVAL_DAY_TIME, "TOK_INTERVAL_DAY_TIME")); b.put(SqlTypeName.BINARY.getName(), new HiveToken(HiveParser.TOK_BINARY, "TOK_BINARY")); calciteToHiveTypeNameMap = b.build(); }; @@ -162,6 +167,14 @@ public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtF case TIMESTAMP: convertedType = dtFactory.createSqlType(SqlTypeName.TIMESTAMP); break; + case INTERVAL_YEAR_MONTH: + convertedType = dtFactory.createSqlIntervalType( + new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1,1))); + break; + case INTERVAL_DAY_TIME: + convertedType = dtFactory.createSqlIntervalType( + new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1,1))); + break; case BINARY: convertedType = dtFactory.createSqlType(SqlTypeName.BINARY); break; @@ -277,6 +290,10 @@ public static TypeInfo convertPrimtiveType(RelDataType rType) { return TypeInfoFactory.dateTypeInfo; case TIMESTAMP: return TypeInfoFactory.timestampTypeInfo; + case INTERVAL_YEAR_MONTH: + return TypeInfoFactory.intervalYearMonthTypeInfo; + case INTERVAL_DAY_TIME: + return TypeInfoFactory.intervalDayTimeTypeInfo; case BINARY: return TypeInfoFactory.binaryTypeInfo; case DECIMAL: diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g index 90b84ac..97e931b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g @@ -123,6 +123,7 @@ KW_DOUBLE: 'DOUBLE'; KW_DATE: 'DATE'; KW_DATETIME: 'DATETIME'; KW_TIMESTAMP: 'TIMESTAMP'; +KW_INTERVAL: 'INTERVAL'; KW_DECIMAL: 'DECIMAL'; KW_STRING: 'STRING'; KW_CHAR: 'CHAR'; @@ -298,6 +299,12 @@ KW_AUTHORIZATION: 'AUTHORIZATION'; KW_CONF: 'CONF'; KW_VALUES: 'VALUES'; KW_RELOAD: 'RELOAD'; +KW_YEAR: 'YEAR'; +KW_MONTH: 'MONTH'; +KW_DAY: 'DAY'; +KW_HOUR: 'HOUR'; +KW_MINUTE: 'MINUTE'; +KW_SECOND: 'SECOND'; // Operators // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work. 
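Aside: the keywords just added to HiveLexer.g (KW_INTERVAL and KW_YEAR through KW_SECOND), together with the intervalLiteral rule added to IdentifiersParser.g below, let queries spell literals such as INTERVAL '2-8' YEAR TO MONTH or INTERVAL '1 2:3:4.567' DAY TO SECOND; the TypeCheckProcFactory changes further below convert the quoted strings into HiveIntervalYearMonth/HiveIntervalDayTime constants. A minimal sketch of that mapping, assuming this patch is applied (the class name IntervalLiteralSketch and its main() harness are illustrative only, not part of the patch):

import java.sql.Timestamp;

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hive.common.util.DateTimeUtils;

public class IntervalLiteralSketch {
  public static void main(String[] args) {
    // INTERVAL '2-8' YEAR TO MONTH carries a "years-months" string
    HiveIntervalYearMonth ym = HiveIntervalYearMonth.valueOf("2-8");
    // INTERVAL '1 2:3:4.567' DAY TO SECOND carries a "days hh:mm:ss.fffffffff" string
    HiveIntervalDayTime dt = HiveIntervalDayTime.valueOf("1 2:3:4.567");
    // Arithmetic is delegated to DateTimeUtils, exactly as in the UDFs later in this patch
    Timestamp ts = Timestamp.valueOf("2001-06-15 00:00:00");
    System.out.println(DateTimeUtils.add(ts, ym));      // 2004-02-15 00:00:00.0
    System.out.println(DateTimeUtils.subtract(ts, dt)); // 2001-06-13 21:56:55.433
  }
}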
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 72b852e..bfa78f3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -111,6 +111,16 @@ TOK_DATELITERAL; TOK_DATETIME; TOK_TIMESTAMP; TOK_TIMESTAMPLITERAL; +TOK_INTERVAL_YEAR_MONTH; +TOK_INTERVAL_YEAR_MONTH_LITERAL; +TOK_INTERVAL_DAY_TIME; +TOK_INTERVAL_DAY_TIME_LITERAL; +TOK_INTERVAL_YEAR_LITERAL; +TOK_INTERVAL_MONTH_LITERAL; +TOK_INTERVAL_DAY_LITERAL; +TOK_INTERVAL_HOUR_LITERAL; +TOK_INTERVAL_MINUTE_LITERAL; +TOK_INTERVAL_SECOND_LITERAL; TOK_STRING; TOK_CHAR; TOK_VARCHAR; @@ -2018,6 +2028,9 @@ primitiveType | KW_DATE -> TOK_DATE | KW_DATETIME -> TOK_DATETIME | KW_TIMESTAMP -> TOK_TIMESTAMP + // Uncomment to allow intervals as table column types + //| KW_INTERVAL KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH + //| KW_INTERVAL KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME | KW_STRING -> TOK_STRING | KW_BINARY -> TOK_BINARY | KW_DECIMAL (LPAREN prec=Number (COMMA scale=Number)? RPAREN)? -> ^(TOK_DECIMAL $prec? $scale?) diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g index cabf971..89d5cd6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g @@ -237,6 +237,7 @@ constant Number | dateLiteral | timestampLiteral + | intervalLiteral | StringLiteral | stringLiteralSequence | BigintLiteral @@ -277,6 +278,26 @@ timestampLiteral } ; +intervalLiteral + : + KW_INTERVAL StringLiteral qualifiers=intervalQualifiers -> + { + adaptor.create(qualifiers.tree.token.getType(), $StringLiteral.text) + } + ; + +intervalQualifiers + : + KW_YEAR KW_TO KW_MONTH -> TOK_INTERVAL_YEAR_MONTH_LITERAL + | KW_DAY KW_TO KW_SECOND -> TOK_INTERVAL_DAY_TIME_LITERAL + | KW_YEAR -> TOK_INTERVAL_YEAR_LITERAL + | KW_MONTH -> TOK_INTERVAL_MONTH_LITERAL + | KW_DAY -> TOK_INTERVAL_DAY_LITERAL + | KW_HOUR -> TOK_INTERVAL_HOUR_LITERAL + | KW_MINUTE -> TOK_INTERVAL_MINUTE_LITERAL + | KW_SECOND -> TOK_INTERVAL_SECOND_LITERAL + ; + expression @init { gParent.pushMsg("expression specification", state); } @after { gParent.popMsg(state); } @@ -573,4 +594,5 @@ principalIdentifier nonReserved : KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | 
KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION | KW_VALUES | KW_URI | KW_SERVER | KW_RELOAD + | KW_INTERVAL | KW_YEAR | KW_MONTH | KW_DAY | KW_HOUR | KW_MINUTE | KW_SECOND ; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index 706390b..07c6ebc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.parse; +import java.math.BigDecimal; import java.sql.Date; import java.sql.Timestamp; import java.util.ArrayList; @@ -34,6 +35,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FunctionInfo; @@ -75,6 +78,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; +import org.apache.hive.common.util.DateTimeUtils; import com.google.common.collect.Lists; @@ -175,9 +179,18 @@ public static ExprNodeDesc processGByExpr(Node nd, Object procCtx) + HiveParser.KW_FALSE + "%"), tf.getBoolExprProcessor()); opRules.put(new RuleRegExp("R5", HiveParser.TOK_DATELITERAL + "%|" + HiveParser.TOK_TIMESTAMPLITERAL + "%"), tf.getDateTimeExprProcessor()); - opRules.put(new RuleRegExp("R6", HiveParser.TOK_TABLE_OR_COL + "%"), + opRules.put(new RuleRegExp("R6", + HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL + "%|" + + HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL + "%|" + + HiveParser.TOK_INTERVAL_YEAR_LITERAL + "%|" + + HiveParser.TOK_INTERVAL_MONTH_LITERAL + "%|" + + HiveParser.TOK_INTERVAL_DAY_LITERAL + "%|" + + HiveParser.TOK_INTERVAL_HOUR_LITERAL + "%|" + + HiveParser.TOK_INTERVAL_MINUTE_LITERAL + 
"%|" + + HiveParser.TOK_INTERVAL_SECOND_LITERAL + "%"), tf.getIntervalExprProcessor()); + opRules.put(new RuleRegExp("R7", HiveParser.TOK_TABLE_OR_COL + "%"), tf.getColumnExprProcessor()); - opRules.put(new RuleRegExp("R7", HiveParser.TOK_SUBQUERY_OP + "%"), + opRules.put(new RuleRegExp("R8", HiveParser.TOK_SUBQUERY_OP + "%"), tf.getSubQueryExprProcessor()); // The dispatcher fires the processor corresponding to the closest matching @@ -472,6 +485,79 @@ public DateTimeExprProcessor getDateTimeExprProcessor() { } /** + * Processor for interval constants. + */ + public static class IntervalExprProcessor implements NodeProcessor { + + private static final BigDecimal NANOS_PER_SEC_BD = new BigDecimal(DateTimeUtils.NANOS_PER_SEC); + @Override + public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + + TypeCheckCtx ctx = (TypeCheckCtx) procCtx; + if (ctx.getError() != null) { + return null; + } + + ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx); + if (desc != null) { + return desc; + } + + ASTNode expr = (ASTNode) nd; + String intervalString = BaseSemanticAnalyzer.stripQuotes(expr.getText()); + + // Get the string value and convert to a Interval value. + try { + switch (expr.getType()) { + case HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo, + HiveIntervalYearMonth.valueOf(intervalString)); + case HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, + HiveIntervalDayTime.valueOf(intervalString)); + case HiveParser.TOK_INTERVAL_YEAR_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo, + new HiveIntervalYearMonth(Integer.parseInt(intervalString), 0)); + case HiveParser.TOK_INTERVAL_MONTH_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo, + new HiveIntervalYearMonth(0, Integer.parseInt(intervalString))); + case HiveParser.TOK_INTERVAL_DAY_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, + new HiveIntervalDayTime(Integer.parseInt(intervalString), 0, 0, 0, 0)); + case HiveParser.TOK_INTERVAL_HOUR_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, + new HiveIntervalDayTime(0, Integer.parseInt(intervalString), 0, 0, 0)); + case HiveParser.TOK_INTERVAL_MINUTE_LITERAL: + return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, + new HiveIntervalDayTime(0, 0, Integer.parseInt(intervalString), 0, 0)); + case HiveParser.TOK_INTERVAL_SECOND_LITERAL: + BigDecimal bd = new BigDecimal(intervalString); + BigDecimal bdSeconds = new BigDecimal(bd.toBigInteger()); + BigDecimal bdNanos = bd.subtract(bdSeconds); + return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, + new HiveIntervalDayTime(0, 0, 0, bdSeconds.intValueExact(), + bdNanos.multiply(NANOS_PER_SEC_BD).intValue())); + default: + throw new IllegalArgumentException("Invalid time literal type " + expr.getType()); + } + } catch (Exception err) { + throw new SemanticException( + "Unable to convert interval literal '" + intervalString + "' to interval value.", err); + } + } + } + + /** + * Factory method to get IntervalExprProcessor. + * + * @return IntervalExprProcessor. + */ + public IntervalExprProcessor getIntervalExprProcessor() { + return new IntervalExprProcessor(); + } + + /** * Processor for table columns. 
*/ public static class ColumnExprProcessor implements NodeProcessor { @@ -619,6 +705,10 @@ public ColumnExprProcessor getColumnExprProcessor() { serdeConstants.DATE_TYPE_NAME); conversionFunctionTextHashMap.put(HiveParser.TOK_TIMESTAMP, serdeConstants.TIMESTAMP_TYPE_NAME); + conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_YEAR_MONTH, + serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME); + conversionFunctionTextHashMap.put(HiveParser.TOK_INTERVAL_DAY_TIME, + serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME); conversionFunctionTextHashMap.put(HiveParser.TOK_DECIMAL, serdeConstants.DECIMAL_TYPE_NAME); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java new file mode 100644 index 0000000..dcdbd53 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseDTI.java @@ -0,0 +1,17 @@ +package org.apache.hadoop.hive.ql.udf.generic; + +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; + +public abstract class GenericUDFBaseDTI extends GenericUDFBaseBinary { + protected transient PrimitiveObjectInspector[] inputOIs; + + protected boolean checkArgs(PrimitiveCategory leftType, PrimitiveCategory rightType) { + boolean result = false; + if (inputOIs[0].getPrimitiveCategory() == leftType + && inputOIs[1].getPrimitiveCategory() == rightType) { + result = true; + } + return result; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java index c5bec44..cea72d5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseUnary.java @@ -24,6 +24,8 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; @@ -53,6 +55,10 @@ protected FloatWritable floatWritable = new FloatWritable(); protected DoubleWritable doubleWritable = new DoubleWritable(); protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable(); + protected HiveIntervalYearMonthWritable intervalYearMonthWritable = + new HiveIntervalYearMonthWritable(); + protected HiveIntervalDayTimeWritable intervalDayTimeWritable = + new HiveIntervalDayTimeWritable(); public GenericUDFBaseUnary() { opName = getClass().getSimpleName(); @@ -74,11 +80,13 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen } inputOI = (PrimitiveObjectInspector) arguments[0]; - if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo())) { + if (!FunctionRegistry.isNumericType(inputOI.getTypeInfo()) + && (inputOI.getTypeInfo() != TypeInfoFactory.intervalDayTimeTypeInfo) + && (inputOI.getTypeInfo() != TypeInfoFactory.intervalYearMonthTypeInfo)) { throw new UDFArgumentTypeException(0, "The " + GenericUDFUtils.getOrdinal(1) - + " argument of " + opName + " is expected to a " - + "numeric type, but " + + " argument of " + opName 
+ " is expected to be a " + + "numeric or interval type, but " + inputOI.getTypeName() + " is found"); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java new file mode 100644 index 0000000..ca30dd8 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java @@ -0,0 +1,255 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import java.sql.Date; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hive.common.util.DateTimeUtils; + +public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI { + + protected transient OperationType minusOpType; + protected transient int intervalArg1Idx; + protected transient int intervalArg2Idx; + protected transient int dtArg1Idx; + protected transient int dtArg2Idx; + protected transient Converter dt1Converter; + protected transient Converter dt2Converter; + + protected transient DateWritable dateResult = new DateWritable(); + protected transient TimestampWritable timestampResult = new TimestampWritable(); + protected transient HiveIntervalYearMonthWritable intervalYearMonthResult = + new HiveIntervalYearMonthWritable(); + 
protected transient HiveIntervalDayTimeWritable intervalDayTimeResult = + new HiveIntervalDayTimeWritable(); + + enum OperationType { + INTERVALYM_MINUS_INTERVALYM, + DATE_MINUS_INTERVALYM, + TIMESTAMP_MINUS_INTERVALYM, + INTERVALDT_MINUS_INTERVALDT, + TIMESTAMP_MINUS_INTERVALDT, + TIMESTAMP_MINUS_TIMESTAMP + }; + + public GenericUDFOPDTIMinus() { + this.opName = getClass().getSimpleName(); + this.opDisplayName = "-"; + } + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) + throws UDFArgumentException { + + if (arguments.length != 2) { + throw new UDFArgumentException(opName + " requires two arguments."); + } + + PrimitiveObjectInspector resultOI = null; + + for (int i = 0; i < 2; i++) { + Category category = arguments[i].getCategory(); + if (category != Category.PRIMITIVE) { + throw new UDFArgumentTypeException(i, "The " + + GenericUDFUtils.getOrdinal(i + 1) + + " argument of " + opName + " is expected to be a " + + Category.PRIMITIVE.toString().toLowerCase() + " type, but " + + category.toString().toLowerCase() + " is found"); + } + } + + inputOIs = new PrimitiveObjectInspector[] { + (PrimitiveObjectInspector) arguments[0], + (PrimitiveObjectInspector) arguments[1] + }; + PrimitiveObjectInspector leftOI = inputOIs[0]; + PrimitiveObjectInspector rightOI = inputOIs[1]; + + // Allowed operations: + // IntervalYearMonth - IntervalYearMonth = IntervalYearMonth + // Date - IntervalYearMonth = Date (operands not reversible) + // Timestamp - IntervalYearMonth = Timestamp (operands not reversible) + // IntervalDayTime - IntervalDayTime = IntervalDayTime + // Date - IntervalDayTime = Timestamp (operands not reversible) + // Timestamp - IntervalDayTime = Timestamp (operands not reversible) + // Timestamp - Timestamp = IntervalDayTime + // Date - Date = IntervalDayTime + // Timestamp - Date = IntervalDayTime (operands reversible) + if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { + minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM; + intervalArg1Idx = 0; + intervalArg2Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.intervalYearMonthTypeInfo); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { + minusOpType = OperationType.DATE_MINUS_INTERVALYM; + dtArg1Idx = 0; + intervalArg1Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.dateTypeInfo); + } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { + minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALYM; + dtArg1Idx = 0; + intervalArg1Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.timestampTypeInfo); + } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) { + minusOpType = OperationType.INTERVALDT_MINUS_INTERVALDT; + intervalArg1Idx = 0; + intervalArg2Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.intervalDayTimeTypeInfo); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME) + || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) { + minusOpType = OperationType.TIMESTAMP_MINUS_INTERVALDT; + dtArg1Idx = 0; + intervalArg1Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.timestampTypeInfo); + dt1Converter =
ObjectInspectorConverters.getConverter(leftOI, resultOI); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.DATE) + || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.TIMESTAMP) + || checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.TIMESTAMP) + || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.DATE)) { + // Operands converted to timestamp, result as interval day-time + minusOpType = OperationType.TIMESTAMP_MINUS_TIMESTAMP; + dtArg1Idx = 0; + dtArg2Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.intervalDayTimeTypeInfo); + dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI); + dt2Converter = ObjectInspectorConverters.getConverter(rightOI, resultOI); + } else { + // Unsupported types - error + List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2); + argTypeInfos.add(leftOI.getTypeInfo()); + argTypeInfos.add(rightOI.getTypeInfo()); + throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null); + } + + return resultOI; + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + switch (minusOpType) { + case INTERVALYM_MINUS_INTERVALYM: { + HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + HiveIntervalYearMonth iym2 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]); + return handleIntervalYearMonthResult(DateTimeUtils.subtract(iym1, iym2)); + } + case DATE_MINUS_INTERVALYM: { + HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + Date dt1 = PrimitiveObjectInspectorUtils.getDate( + arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]); + return handleDateResult(DateTimeUtils.subtract(dt1, iym1)); + } + case TIMESTAMP_MINUS_INTERVALYM: { + HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp( + arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]); + return handleTimestampResult(DateTimeUtils.subtract(ts1, iym1)); + } + case INTERVALDT_MINUS_INTERVALDT: { + HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + HiveIntervalDayTime idt2 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime( + arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]); + return handleIntervalDayTimeResult(DateTimeUtils.subtract(idt1, idt2)); + } + case TIMESTAMP_MINUS_INTERVALDT: { + HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp( + arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]); + return handleTimestampResult(DateTimeUtils.subtract(ts1, idt1)); + } + case TIMESTAMP_MINUS_TIMESTAMP: { + Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp( + arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]); + Timestamp ts2 = PrimitiveObjectInspectorUtils.getTimestamp( + arguments[dtArg2Idx].get(), inputOIs[dtArg2Idx]); + return handleIntervalDayTimeResult(DateTimeUtils.subtract(ts1, ts2)); + } + default: + throw new HiveException("Unknown MinusOpType " + minusOpType); + } + } + + protected DateWritable
handleDateResult(Date result) { + if (result == null) { + return null; + } + dateResult.set(result); + return dateResult; + } + + protected TimestampWritable handleTimestampResult(Timestamp result) { + if (result == null) { + return null; + } + timestampResult.set(result); + return timestampResult; + } + + protected HiveIntervalYearMonthWritable handleIntervalYearMonthResult( + HiveIntervalYearMonth result) { + if (result == null) { + return null; + } + intervalYearMonthResult.set(result); + return intervalYearMonthResult; + } + + protected HiveIntervalDayTimeWritable handleIntervalDayTimeResult( + HiveIntervalDayTime result) { + if (result == null) { + return null; + } + intervalDayTimeResult.set(result); + return intervalDayTimeResult; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java new file mode 100644 index 0000000..7268a54 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java @@ -0,0 +1,250 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import java.sql.Date; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; +import org.apache.hive.common.util.DateTimeUtils; + +public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI { + + protected transient OperationType plusOpType; + protected transient int intervalArg1Idx; + protected transient int intervalArg2Idx; + protected transient int dtArgIdx; + protected transient Converter dtConverter; + + protected transient TimestampWritable timestampResult = new TimestampWritable(); + protected transient DateWritable dateResult = new DateWritable(); + protected transient HiveIntervalDayTimeWritable intervalDayTimeResult = + new HiveIntervalDayTimeWritable(); + protected transient HiveIntervalYearMonthWritable intervalYearMonthResult = + new HiveIntervalYearMonthWritable(); + + enum OperationType { + INTERVALYM_PLUS_INTERVALYM, + INTERVALYM_PLUS_DATE, + INTERVALYM_PLUS_TIMESTAMP, + INTERVALDT_PLUS_INTERVALDT, + INTERVALDT_PLUS_TIMESTAMP, + }; + + public GenericUDFOPDTIPlus() { + this.opName = getClass().getSimpleName(); + this.opDisplayName = "+"; + } + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) + throws UDFArgumentException { + + if (arguments.length != 2) { + throw new UDFArgumentException(opName + " requires two arguments."); + } + + PrimitiveObjectInspector resultOI = null; + + for (int i = 0; i < 2; i++) { + Category category = arguments[i].getCategory(); + if (category != Category.PRIMITIVE) { + throw new UDFArgumentTypeException(i, "The " + + GenericUDFUtils.getOrdinal(i + 1) + + " argument of " + opName + " is expected to be a " + + Category.PRIMITIVE.toString().toLowerCase() + " type, but " + + category.toString().toLowerCase() + " is found"); + } + } + + inputOIs = new PrimitiveObjectInspector[] { + (PrimitiveObjectInspector) arguments[0], + (PrimitiveObjectInspector) arguments[1] + }; + PrimitiveObjectInspector leftOI = inputOIs[0]; + PrimitiveObjectInspector rightOI = inputOIs[1]; + + // Allowed operations: + // IntervalYearMonth +
IntervalYearMonth = IntervalYearMonth + // IntervalYearMonth + Date = Date (operands reversible) + // IntervalYearMonth + Timestamp = Timestamp (operands reversible) + // IntervalDayTime + IntervalDayTime = IntervalDayTime + // IntervalDayTime + Date = Timestamp (operands reversible) + // IntervalDayTime + Timestamp = Timestamp (operands reversible) + if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { + plusOpType = OperationType.INTERVALYM_PLUS_INTERVALYM; + intervalArg1Idx = 0; + intervalArg2Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.intervalYearMonthTypeInfo); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { + plusOpType = OperationType.INTERVALYM_PLUS_DATE; + dtArgIdx = 0; + intervalArg1Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.dateTypeInfo); + } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.DATE)) { + plusOpType = OperationType.INTERVALYM_PLUS_DATE; + intervalArg1Idx = 0; + dtArgIdx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.dateTypeInfo); + } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { + plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP; + dtArgIdx = 0; + intervalArg1Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.timestampTypeInfo); + } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.TIMESTAMP)) { + plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP; + intervalArg1Idx = 0; + dtArgIdx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.timestampTypeInfo); + } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) { + plusOpType = OperationType.INTERVALDT_PLUS_INTERVALDT; + intervalArg1Idx = 0; + intervalArg2Idx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.intervalDayTimeTypeInfo); + } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.DATE) + || checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.TIMESTAMP)) { + plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP; + intervalArg1Idx = 0; + dtArgIdx = 1; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.timestampTypeInfo); + dtConverter = ObjectInspectorConverters.getConverter(rightOI, resultOI); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME) + || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) { + plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP; + intervalArg1Idx = 1; + dtArgIdx = 0; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.timestampTypeInfo); + dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI); + } else { + // Unsupported types - error + List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2); + argTypeInfos.add(leftOI.getTypeInfo()); + argTypeInfos.add(rightOI.getTypeInfo()); + throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null); + } + + return resultOI; + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + switch (plusOpType) { + case INTERVALYM_PLUS_INTERVALYM: { +
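// Year-month plus year-month: DateTimeUtils.add sums the two intervals' month totals into a new HiveIntervalYearMonth. +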
HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + HiveIntervalYearMonth iym2 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]); + return handleIntervalYearMonthResult(DateTimeUtils.add(iym1, iym2)); + } + case INTERVALYM_PLUS_DATE: { + HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + Date dt1 = PrimitiveObjectInspectorUtils.getDate( + arguments[dtArgIdx].get(), inputOIs[dtArgIdx]); + return handleDateResult(DateTimeUtils.add(dt1, iym1)); + } + case INTERVALYM_PLUS_TIMESTAMP: { + HiveIntervalYearMonth iym1 = PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp( + arguments[dtArgIdx].get(), inputOIs[dtArgIdx]); + return handleTimestampResult(DateTimeUtils.add(ts1, iym1)); + } + case INTERVALDT_PLUS_INTERVALDT: { + HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + HiveIntervalDayTime idt2 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime( + arguments[intervalArg2Idx].get(), inputOIs[intervalArg2Idx]); + return handleIntervalDayTimeResult(DateTimeUtils.add(idt1, idt2)); + } + case INTERVALDT_PLUS_TIMESTAMP: { + HiveIntervalDayTime idt1 = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime( + arguments[intervalArg1Idx].get(), inputOIs[intervalArg1Idx]); + Timestamp ts1 = PrimitiveObjectInspectorUtils.getTimestamp( + arguments[dtArgIdx].get(), inputOIs[dtArgIdx]); + return handleTimestampResult(DateTimeUtils.add(ts1, idt1)); + } + default: + throw new HiveException("Unknown PlusOpType " + plusOpType); + } + } + + protected DateWritable handleDateResult(Date result) { + if (result == null) { + return null; + } + dateResult.set(result); + return dateResult; + } + + protected TimestampWritable handleTimestampResult(Timestamp result) { + if (result == null) { + return null; + } + timestampResult.set(result); + return timestampResult; + } + + protected HiveIntervalYearMonthWritable handleIntervalYearMonthResult( + HiveIntervalYearMonth result) { + if (result == null) { + return null; + } + intervalYearMonthResult.set(result); + return intervalYearMonthResult; + } + + protected HiveIntervalDayTimeWritable handleIntervalDayTimeResult( + HiveIntervalDayTime result) { + if (result == null) { + return null; + } + intervalDayTimeResult.set(result); + return intervalDayTimeResult; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java index 5419a19..18fbb5a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java @@ -46,7 +46,6 @@ protected GenericUDFBaseNumeric instantiateNumericUDF() { @Override protected GenericUDF instantiateDTIUDF() { - // TODO: implement date-time/interval version of UDF - return new GenericUDFOPNumericMinus(); + return new GenericUDFOPDTIMinus(); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java index d653264..de964d6 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNegative.java @@ -19,6 +19,8 @@ package org.apache.hadoop.hive.ql.udf.generic; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus; @@ -28,6 +30,8 @@ import org.apache.hadoop.hive.serde2.io.ByteWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; @@ -81,6 +85,16 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { HiveDecimal dec = ((HiveDecimalWritable)input).getHiveDecimal(); decimalWritable.set(dec.negate()); return decimalWritable; + case INTERVAL_YEAR_MONTH: + HiveIntervalYearMonth intervalYearMonth = + ((HiveIntervalYearMonthWritable) input).getHiveIntervalYearMonth(); + this.intervalYearMonthWritable.set(intervalYearMonth.negate()); + return this.intervalYearMonthWritable; + case INTERVAL_DAY_TIME: + HiveIntervalDayTime intervalDayTime = + ((HiveIntervalDayTimeWritable) input).getHiveIntervalDayTime(); + this.intervalDayTimeWritable.set(intervalDayTime.negate()); + return intervalDayTimeWritable; default: // Should never happen. throw new RuntimeException("Unexpected type in evaluating " + opName + ": " + diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java index accd347..bfac5a8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java @@ -52,7 +52,6 @@ protected GenericUDFBaseNumeric instantiateNumericUDF() { @Override protected GenericUDF instantiateDTIUDF() { - // TODO: implement date-time/interval version of UDF - return new GenericUDFOPNumericPlus(); + return new GenericUDFOPDTIPlus(); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java new file mode 100644 index 0000000..9e0b1fd --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveIntervalDayTimeConverter; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; + +/** +* +* GenericUDFToIntervalDayTime +* +* Example usage: +* ... CAST(<string> as INTERVAL DAY TO SECOND) ... +* +* Creates a HiveIntervalDayTimeWritable object using PrimitiveObjectInspectorConverter +* +*/ +public class GenericUDFToIntervalDayTime extends GenericUDF { + + private transient PrimitiveObjectInspector argumentOI; + private transient HiveIntervalDayTimeConverter tc; + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + if (arguments.length < 1) { + throw new UDFArgumentLengthException( + "The function INTERVAL_DAY_TIME requires at least one argument, got " + + arguments.length); + } + try { + argumentOI = (PrimitiveObjectInspector) arguments[0]; + } catch (ClassCastException e) { + throw new UDFArgumentException( + "The function INTERVAL_DAY_TIME takes only primitive types"); + } + + tc = new HiveIntervalDayTimeConverter(argumentOI, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector); + return PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector; + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + Object o0 = arguments[0].get(); + if (o0 == null) { + return null; + } + + return tc.convert(o0); + } + + @Override + public String getDisplayString(String[] children) { + assert (children.length == 1); + StringBuilder sb = new StringBuilder(); + sb.append("CAST( "); + sb.append(children[0]); + sb.append(" AS INTERVAL DAY TO SECOND)"); + return sb.toString(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java new file mode 100644 index 0000000..e4eefe6 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.udf.generic; + +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.HiveIntervalYearMonthConverter; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; + +/** +* +* GenericUDFToIntervalYearMonth +* +* Example usage: +* ... CAST(<string> as INTERVAL YEAR TO MONTH) ... +* +* Creates a HiveIntervalYearMonthWritable object using PrimitiveObjectInspectorConverter +* +*/ +public class GenericUDFToIntervalYearMonth extends GenericUDF { + + private transient PrimitiveObjectInspector argumentOI; + private transient HiveIntervalYearMonthConverter tc; + + @Override + public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { + if (arguments.length < 1) { + throw new UDFArgumentLengthException( + "The function INTERVAL_YEAR_MONTH requires at least one argument, got " + + arguments.length); + } + try { + argumentOI = (PrimitiveObjectInspector) arguments[0]; + } catch (ClassCastException e) { + throw new UDFArgumentException( + "The function INTERVAL_YEAR_MONTH takes only primitive types"); + } + + tc = new HiveIntervalYearMonthConverter(argumentOI, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector); + return PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector; + } + + @Override + public Object evaluate(DeferredObject[] arguments) throws HiveException { + Object o0 = arguments[0].get(); + if (o0 == null) { + return null; + } + + return tc.convert(o0); + } + + @Override + public String getDisplayString(String[] children) { + assert (children.length == 1); + StringBuilder sb = new StringBuilder(); + sb.append("CAST( "); + sb.append(children[0]); + sb.append(" AS INTERVAL YEAR TO MONTH)"); + return sb.toString(); + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java index 24618c9..771a6c7 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java @@ -18,17 +18,26 @@ package org.apache.hadoop.hive.ql.udf.generic; +import java.sql.Date; +import java.sql.Timestamp; + import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.io.ByteWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import
org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; @@ -244,4 +253,143 @@ public void testReturnTypeAnsiSql() throws Exception { verifyReturnType(new GenericUDFOPMinus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)"); } + + @Test + public void testIntervalYearMonthMinusIntervalYearMonth() throws Exception { + GenericUDFOPMinus udf = new GenericUDFOPMinus(); + + HiveIntervalYearMonthWritable left = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("3-1")); + HiveIntervalYearMonthWritable right = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo, oi.getTypeInfo()); + HiveIntervalYearMonthWritable res = (HiveIntervalYearMonthWritable) udf.evaluate(args); + Assert.assertEquals(HiveIntervalYearMonth.valueOf("1-11"), res.getHiveIntervalYearMonth()); + } + + + @Test + public void testDateMinusIntervalYearMonth() throws Exception { + GenericUDFOPMinus udf = new GenericUDFOPMinus(); + + DateWritable left = + new DateWritable(Date.valueOf("2004-02-15")); + HiveIntervalYearMonthWritable right = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableDateObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo()); + DateWritable res = (DateWritable) udf.evaluate(args); + Assert.assertEquals(Date.valueOf("2001-06-15"), res.get()); + } + + @Test + public void testTimestampMinusIntervalYearMonth() throws Exception { + GenericUDFOPMinus udf = new GenericUDFOPMinus(); + + TimestampWritable left = + new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789")); + HiveIntervalYearMonthWritable right = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"), res.getTimestamp()); + } + + @Test + public void testIntervalDayTimeMinusIntervalDayTime() throws Exception { + GenericUDFOPMinus 
udf = new GenericUDFOPMinus(); + + HiveIntervalDayTimeWritable left = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("2 2:3:4.567")); + HiveIntervalDayTimeWritable right = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo()); + HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args); + Assert.assertEquals(HiveIntervalDayTime.valueOf("1 0:0:0.567"), res.getHiveIntervalDayTime()); + } + + @Test + public void testTimestampMinusIntervalDayTime() throws Exception { + GenericUDFOPMinus udf = new GenericUDFOPMinus(); + + TimestampWritable left = + new TimestampWritable(Timestamp.valueOf("2001-01-02 2:3:4.567")); + HiveIntervalDayTimeWritable right = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2001-01-01 00:00:00"), res.getTimestamp()); + } + + @Test + public void testDateMinusIntervalDayTime() throws Exception { + GenericUDFOPMinus udf = new GenericUDFOPMinus(); + + DateWritable left = + new DateWritable(Date.valueOf("2001-01-01")); + HiveIntervalDayTimeWritable right = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.555")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableDateObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2000-12-30 23:59:59.445"), res.getTimestamp()); + } } diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java index 4b2f7fb..eba4894 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java @@ -18,17 +18,26 @@ package org.apache.hadoop.hive.ql.udf.generic; +import java.sql.Date; +import java.sql.Timestamp; + import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import 
org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject; import org.apache.hadoop.hive.serde2.io.ByteWritable; +import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; +import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; @@ -250,4 +259,236 @@ public void testReturnTypeAnsiSql() throws Exception { verifyReturnType(new GenericUDFOPPlus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)"); } + + @Test + public void testIntervalYearMonthPlusIntervalYearMonth() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + HiveIntervalYearMonthWritable left = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2")); + HiveIntervalYearMonthWritable right = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-11")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo, oi.getTypeInfo()); + HiveIntervalYearMonthWritable res = (HiveIntervalYearMonthWritable) udf.evaluate(args); + Assert.assertEquals(HiveIntervalYearMonth.valueOf("3-1"), res.getHiveIntervalYearMonth()); + } + + @Test + public void testIntervalYearMonthPlusDate() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + HiveIntervalYearMonthWritable left = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8")); + DateWritable right = + new DateWritable(Date.valueOf("2001-06-15")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector, + PrimitiveObjectInspectorFactory.writableDateObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo()); + DateWritable res = (DateWritable) udf.evaluate(args); + Assert.assertEquals(Date.valueOf("2004-02-15"), res.get()); + } + + @Test + public void testDatePlusIntervalYearMonth() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + DateWritable left = + new DateWritable(Date.valueOf("2001-06-15")); + HiveIntervalYearMonthWritable right = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableDateObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector + }; + 
DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo()); + DateWritable res = (DateWritable) udf.evaluate(args); + Assert.assertEquals(Date.valueOf("2004-02-15"), res.get()); + } + + @Test + public void testIntervalYearMonthPlusTimestamp() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + HiveIntervalYearMonthWritable left = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2")); + TimestampWritable right = + new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector, + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp()); + } + + @Test + public void testTimestampPlusIntervalYearMonth() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + TimestampWritable left = + new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789")); + HiveIntervalYearMonthWritable right = + new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp()); + } + + @Test + public void testIntervalDayTimePlusIntervalDayTime() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + HiveIntervalDayTimeWritable left = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 0:0:0.567")); + HiveIntervalDayTimeWritable right = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.intervalDayTimeTypeInfo, oi.getTypeInfo()); + HiveIntervalDayTimeWritable res = (HiveIntervalDayTimeWritable) udf.evaluate(args); + Assert.assertEquals(HiveIntervalDayTime.valueOf("2 2:3:4.567"), res.getHiveIntervalDayTime()); + } + + @Test + public void testIntervalDayTimePlusTimestamp() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + HiveIntervalDayTimeWritable left = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")); 
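+ // Note: the mirrored operand order (timestamp + interval) is covered by
+ // testTimestampPlusIntervalDayTime below and should yield the same result.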
+ TimestampWritable right = + new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector, + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp()); + } + + @Test + public void testTimestampPlusIntervalDayTime() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + TimestampWritable left = + new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00")); + HiveIntervalDayTimeWritable right = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableTimestampObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp()); + } + + @Test + public void testIntervalDayTimePlusDate() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + HiveIntervalDayTimeWritable left = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")); + DateWritable right = + new DateWritable(Date.valueOf("2001-01-01")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector, + PrimitiveObjectInspectorFactory.writableDateObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + // Date + day-time interval = timestamp + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp()); + } + + @Test + public void testDatePlusIntervalDayTime() throws Exception { + GenericUDFOPPlus udf = new GenericUDFOPPlus(); + + DateWritable left = + new DateWritable(Date.valueOf("2001-01-01")); + HiveIntervalDayTimeWritable right = + new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567")); + ObjectInspector[] inputOIs = { + PrimitiveObjectInspectorFactory.writableDateObjectInspector, + PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector + }; + DeferredObject[] args = { + new DeferredJavaObject(left), + new DeferredJavaObject(right), + }; + + // Date + day-time interval = timestamp + PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs); + Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo()); + TimestampWritable res = (TimestampWritable) udf.evaluate(args); + Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp()); + } } diff --git 
a/ql/src/test/queries/clientnegative/interval_1.q b/ql/src/test/queries/clientnegative/interval_1.q new file mode 100644 index 0000000..0db864d --- /dev/null +++ b/ql/src/test/queries/clientnegative/interval_1.q @@ -0,0 +1,2 @@ +-- year-month/day-time intervals not compatible +select interval_day_time(interval '1' year) from src limit 1; diff --git a/ql/src/test/queries/clientnegative/interval_2.q b/ql/src/test/queries/clientnegative/interval_2.q new file mode 100644 index 0000000..0af08ac --- /dev/null +++ b/ql/src/test/queries/clientnegative/interval_2.q @@ -0,0 +1,3 @@ +-- year-month/day-time intervals not compatible +select interval '1' year - interval '365' day from src limit 1; + diff --git a/ql/src/test/queries/clientnegative/interval_3.q b/ql/src/test/queries/clientnegative/interval_3.q new file mode 100644 index 0000000..1e2252f --- /dev/null +++ b/ql/src/test/queries/clientnegative/interval_3.q @@ -0,0 +1,3 @@ +-- year-month/day-time intervals not compatible +select interval '1' year + interval '365' day from src limit 1; + diff --git a/ql/src/test/queries/clientpositive/interval_1.q b/ql/src/test/queries/clientpositive/interval_1.q new file mode 100644 index 0000000..b3b586f --- /dev/null +++ b/ql/src/test/queries/clientpositive/interval_1.q @@ -0,0 +1,42 @@ +select + interval '10-11' year to month, + interval '10' year, + interval '11' month +from src limit 1; + +select + interval_year_month('10-11'), + interval_year_month(cast('10-11' as string)), + interval_year_month(cast('10-11' as varchar(10))), + interval_year_month(cast('10-11' as char(10))), + interval_year_month('10-11') = interval '10-11' year to month +from src limit 1; + +-- Test normalization of interval values +select + interval '49' month +from src limit 1; + +select + interval '10 9:8:7.987654321' day to second, + interval '10' day, + interval '11' hour, + interval '12' minute, + interval '13' second, + interval '13.123456789' second +from src limit 1; + +select + interval_day_time('2 1:2:3'), + interval_day_time(cast('2 1:2:3' as string)), + interval_day_time(cast('2 1:2:3' as varchar(10))), + interval_day_time(cast('2 1:2:3' as char(10))), + interval_day_time('2 1:2:3') = interval '2 1:2:3' day to second +from src limit 1; + +-- Test normalization of interval values +select + interval '49' hour, + interval '1470' minute, + interval '90061.111111111' second +from src limit 1; diff --git a/ql/src/test/queries/clientpositive/interval_2.q b/ql/src/test/queries/clientpositive/interval_2.q new file mode 100644 index 0000000..f622c76 --- /dev/null +++ b/ql/src/test/queries/clientpositive/interval_2.q @@ -0,0 +1,87 @@ +-- group-by/order-by/aggregation functions + +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +order by iym asc +limit 5; + +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +order by iym desc +limit 5; + +-- same query as previous, with having clause +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +having max(idt) > interval '496 0:0:0' day to second 
+order by iym desc +limit 5; + +select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +order by idt asc +limit 5; + +select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +order by idt desc +limit 5; + +-- same query as previous, with having clause +select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +having max(iym) < interval '496-0' year to month +order by idt desc +limit 5; + +select + count(iym), count(idt), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1; + diff --git a/ql/src/test/queries/clientpositive/interval_3.q b/ql/src/test/queries/clientpositive/interval_3.q new file mode 100644 index 0000000..dec9bed --- /dev/null +++ b/ql/src/test/queries/clientpositive/interval_3.q @@ -0,0 +1,38 @@ +-- where clause +select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) - date '1992-01-01') < interval '365 0:0:0' day to second +order by l_orderkey; + +select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01' +order by l_orderkey; + +select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01' + and (cast(l_receiptdate as date) - cast(l_shipdate as date)) < interval '10' day +order by l_orderkey; + + +-- joins +select + a.l_orderkey, b.l_orderkey, a.interval1 +from + ( + select + l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - cast(l_shipdate as date)) as interval1 + from lineitem + ) a + join + ( + select + l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - date '1992-07-02') as interval2 + from lineitem + ) b + on a.interval1 = b.interval2 and a.l_orderkey = b.l_orderkey +order by a.l_orderkey; diff --git a/ql/src/test/queries/clientpositive/interval_arithmetic.q b/ql/src/test/queries/clientpositive/interval_arithmetic.q new file mode 100644 index 0000000..06acbd7 --- /dev/null +++ b/ql/src/test/queries/clientpositive/interval_arithmetic.q @@ -0,0 +1,162 @@ +create table interval_arithmetic_1 (dateval date, tsval timestamp); +insert overwrite table interval_arithmetic_1 + select cast(ctimestamp1 as date), ctimestamp1 from alltypesorc; + +-- interval year-month arithmetic +explain +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic_1 +limit 2; + +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + 
dateval +from interval_arithmetic_1 +limit 2; + +explain +select + dateval, + dateval - date '1999-06-07', + date '1999-06-07' - dateval, + dateval - dateval +from interval_arithmetic_1 +limit 2; + +select + dateval, + dateval - date '1999-06-07', + date '1999-06-07' - dateval, + dateval - dateval +from interval_arithmetic_1 +limit 2; + +explain +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic_1 +limit 2; + +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic_1 +limit 2; + +explain +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic_1 +limit 2; + +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic_1 +limit 2; + + +-- interval day-time arithmetic +explain +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic_1 +limit 2; + +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic_1 +limit 2; + +explain +select + dateval, + tsval, + dateval - tsval, + tsval - dateval, + tsval - tsval +from interval_arithmetic_1 +limit 2; + +select + dateval, + tsval, + dateval - tsval, + tsval - dateval, + tsval - tsval +from interval_arithmetic_1 +limit 2; + +explain +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic_1 +limit 2; + +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic_1 +limit 2; + +explain +select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic_1 +limit 2; + +select + interval '99 
11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic_1 +limit 2; + +drop table interval_arithmetic_1; diff --git a/ql/src/test/queries/clientpositive/interval_comparison.q b/ql/src/test/queries/clientpositive/interval_comparison.q new file mode 100644 index 0000000..8e78664 --- /dev/null +++ b/ql/src/test/queries/clientpositive/interval_comparison.q @@ -0,0 +1,85 @@ + +-- should all be true +select + i1 = i1, + i1 = i2, + i1 >= i2, + i1 <= i2, + i3 = i3, + i3 = i4, + i3 <= i4, + i3 >= i4, + i1 < i3, + i3 > i1, + i1 != i3 +from ( + select + interval '2-0' year to month as i1, + interval '2' year as i2, + interval '2-1' year to month as i3, + interval '25' month as i4 + from src limit 1 +) q1; + +-- should all be false +select + i1 != i1, + i1 != i2, + i1 < i2, + i1 > i2, + i1 = i3, + i1 > i3, + i1 >= i3, + i3 < i1, + i3 <= i1 +from ( + select + interval '2-0' year to month as i1, + interval '2' year as i2, + interval '2-1' year to month as i3, + interval '25' month as i4 + from src limit 1 +) q1; + +-- should all be true +select + i1 = i1, + i1 = i2, + i1 >= i2, + i1 <= i2, + i3 = i3, + i3 = i4, + i3 <= i4, + i3 >= i4, + i1 < i3, + i3 > i1, + i1 != i3 +from ( + select + interval '1 0:0:0' day to second as i1, + interval '24' hour as i2, + interval '1 0:0:1' day to second as i3, + interval '86401' second as i4 + from src limit 1 +) q1; + +-- should all be false +select + i1 != i1, + i1 != i2, + i1 < i2, + i1 > i2, + i1 = i3, + i1 > i3, + i1 >= i3, + i3 < i1, + i3 <= i1 +from ( + select + interval '1 0:0:0' day to second as i1, + interval '24' hour as i2, + interval '1 0:0:1' day to second as i3, + interval '86401' second as i4 + from src limit 1 +) q1; + diff --git a/ql/src/test/results/clientnegative/interval_1.q.out b/ql/src/test/results/clientnegative/interval_1.q.out new file mode 100644 index 0000000..208b165 --- /dev/null +++ b/ql/src/test/results/clientnegative/interval_1.q.out @@ -0,0 +1 @@ +FAILED: RuntimeException Cannot convert to IntervalDayTime from: interval_year_month diff --git a/ql/src/test/results/clientnegative/interval_2.q.out b/ql/src/test/results/clientnegative/interval_2.q.out new file mode 100644 index 0000000..b00d27c --- /dev/null +++ b/ql/src/test/results/clientnegative/interval_2.q.out @@ -0,0 +1 @@ +FAILED: SemanticException Line 0:-1 Wrong arguments ''365'': No matching method for class org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIMinus with (interval_year_month, interval_day_time) diff --git a/ql/src/test/results/clientnegative/interval_3.q.out b/ql/src/test/results/clientnegative/interval_3.q.out new file mode 100644 index 0000000..0b33ae0 --- /dev/null +++ b/ql/src/test/results/clientnegative/interval_3.q.out @@ -0,0 +1 @@ +FAILED: SemanticException Line 0:-1 Wrong arguments ''365'': No matching method for class org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDTIPlus with (interval_year_month, interval_day_time) diff --git a/ql/src/test/results/clientpositive/interval_1.q.out b/ql/src/test/results/clientpositive/interval_1.q.out new file mode 100644 index 0000000..ae2471b --- /dev/null +++ b/ql/src/test/results/clientpositive/interval_1.q.out @@ -0,0 +1,116 @@ +PREHOOK: query: select + interval '10-11' year to month, + interval '10' year, + interval '11' month +from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + interval 
'10-11' year to month, + interval '10' year, + interval '11' month +from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +10-11 10-0 0-11 +PREHOOK: query: select + interval_year_month('10-11'), + interval_year_month(cast('10-11' as string)), + interval_year_month(cast('10-11' as varchar(10))), + interval_year_month(cast('10-11' as char(10))), + interval_year_month('10-11') = interval '10-11' year to month +from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + interval_year_month('10-11'), + interval_year_month(cast('10-11' as string)), + interval_year_month(cast('10-11' as varchar(10))), + interval_year_month(cast('10-11' as char(10))), + interval_year_month('10-11') = interval '10-11' year to month +from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +10-11 10-11 10-11 10-11 true +PREHOOK: query: -- Test normalization of interval values +select + interval '49' month +from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- Test normalization of interval values +select + interval '49' month +from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +4-1 +PREHOOK: query: select + interval '10 9:8:7.987654321' day to second, + interval '10' day, + interval '11' hour, + interval '12' minute, + interval '13' second, + interval '13.123456789' second +from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + interval '10 9:8:7.987654321' day to second, + interval '10' day, + interval '11' hour, + interval '12' minute, + interval '13' second, + interval '13.123456789' second +from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +10 09:08:07.987654321 10 00:00:00.000000000 0 11:00:00.000000000 0 00:12:00.000000000 0 00:00:13.000000000 0 00:00:13.123456789 +PREHOOK: query: select + interval_day_time('2 1:2:3'), + interval_day_time(cast('2 1:2:3' as string)), + interval_day_time(cast('2 1:2:3' as varchar(10))), + interval_day_time(cast('2 1:2:3' as char(10))), + interval_day_time('2 1:2:3') = interval '2 1:2:3' day to second +from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + interval_day_time('2 1:2:3'), + interval_day_time(cast('2 1:2:3' as string)), + interval_day_time(cast('2 1:2:3' as varchar(10))), + interval_day_time(cast('2 1:2:3' as char(10))), + interval_day_time('2 1:2:3') = interval '2 1:2:3' day to second +from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +2 01:02:03.000000000 2 01:02:03.000000000 2 01:02:03.000000000 2 01:02:03.000000000 true +PREHOOK: query: -- Test normalization of interval values +select + interval '49' hour, + interval '1470' minute, + interval '90061.111111111' second +from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- Test normalization of interval values +select + interval '49' hour, + interval '1470' minute, + interval '90061.111111111' second +from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +2 01:00:00.000000000 1 00:30:00.000000000 1 01:01:01.111111111 diff --git 
a/ql/src/test/results/clientpositive/interval_2.q.out b/ql/src/test/results/clientpositive/interval_2.q.out new file mode 100644 index 0000000..6c40c53 --- /dev/null +++ b/ql/src/test/results/clientpositive/interval_2.q.out @@ -0,0 +1,231 @@ +PREHOOK: query: -- group-by/order-by/aggregation functions + +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +order by iym asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- group-by/order-by/aggregation functions + +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +order by iym asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +0-1 3 0 0 0-1 0-1 0 01:01:01.000000000 0 01:01:01.000000000 +2-1 1 2 2 2-1 2-1 2 01:01:01.000000000 2 01:01:01.000000000 +4-1 1 4 4 4-1 4-1 4 01:01:01.000000000 4 01:01:01.000000000 +5-1 3 5 5 5-1 5-1 5 01:01:01.000000000 5 01:01:01.000000000 +8-1 1 8 8 8-1 8-1 8 01:01:01.000000000 8 01:01:01.000000000 +PREHOOK: query: select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +order by iym desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +order by iym desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +498-1 3 498 498 498-1 498-1 498 01:01:01.000000000 498 01:01:01.000000000 +497-1 1 497 497 497-1 497-1 497 01:01:01.000000000 497 01:01:01.000000000 +496-1 1 496 496 496-1 496-1 496 01:01:01.000000000 496 01:01:01.000000000 +495-1 1 495 495 495-1 495-1 495 01:01:01.000000000 495 01:01:01.000000000 +494-1 1 494 494 494-1 494-1 494 01:01:01.000000000 494 01:01:01.000000000 +PREHOOK: query: -- same query as previous, with having clause +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +having max(idt) > interval '496 0:0:0' day to second +order by iym desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- same query as previous, with having clause +select + iym, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by iym +having max(idt) > interval '496 0:0:0' day to second +order by iym desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +498-1 3 498 498 498-1 498-1 498 01:01:01.000000000 498 01:01:01.000000000 +497-1 1 497 497 497-1 497-1 497 
01:01:01.000000000 497 01:01:01.000000000 +496-1 1 496 496 496-1 496-1 496 01:01:01.000000000 496 01:01:01.000000000 +PREHOOK: query: select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +order by idt asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +order by idt asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +0 01:01:01.000000000 3 0 0 0-1 0-1 0 01:01:01.000000000 0 01:01:01.000000000 +2 01:01:01.000000000 1 2 2 2-1 2-1 2 01:01:01.000000000 2 01:01:01.000000000 +4 01:01:01.000000000 1 4 4 4-1 4-1 4 01:01:01.000000000 4 01:01:01.000000000 +5 01:01:01.000000000 3 5 5 5-1 5-1 5 01:01:01.000000000 5 01:01:01.000000000 +8 01:01:01.000000000 1 8 8 8-1 8-1 8 01:01:01.000000000 8 01:01:01.000000000 +PREHOOK: query: select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +order by idt desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +order by idt desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +498 01:01:01.000000000 3 498 498 498-1 498-1 498 01:01:01.000000000 498 01:01:01.000000000 +497 01:01:01.000000000 1 497 497 497-1 497-1 497 01:01:01.000000000 497 01:01:01.000000000 +496 01:01:01.000000000 1 496 496 496-1 496-1 496 01:01:01.000000000 496 01:01:01.000000000 +495 01:01:01.000000000 1 495 495 495-1 495-1 495 01:01:01.000000000 495 01:01:01.000000000 +494 01:01:01.000000000 1 494 494 494-1 494-1 494 01:01:01.000000000 494 01:01:01.000000000 +PREHOOK: query: -- same query as previous, with having clause +select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +having max(iym) < interval '496-0' year to month +order by idt desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- same query as previous, with having clause +select + idt, count(*), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +group by idt +having max(iym) < interval '496-0' year to month +order by idt desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +495 01:01:01.000000000 1 495 495 495-1 495-1 495 01:01:01.000000000 495 01:01:01.000000000 +494 01:01:01.000000000 1 494 494 494-1 494-1 494 01:01:01.000000000 494 
01:01:01.000000000 +493 01:01:01.000000000 1 493 493 493-1 493-1 493 01:01:01.000000000 493 01:01:01.000000000 +492 01:01:01.000000000 2 492 492 492-1 492-1 492 01:01:01.000000000 492 01:01:01.000000000 +491 01:01:01.000000000 1 491 491 491-1 491-1 491 01:01:01.000000000 491 01:01:01.000000000 +PREHOOK: query: select + count(iym), count(idt), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select + count(iym), count(idt), min(key), max(key), min(iym), max(iym), min(idt), max(idt) +from ( + select + key, + interval_year_month(concat(key, '-1')) as iym, + interval_day_time(concat(key, ' 1:1:1')) as idt + from src) q1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +500 500 0 98 0-1 498-1 0 01:01:01.000000000 498 01:01:01.000000000 diff --git a/ql/src/test/results/clientpositive/interval_3.q.out b/ql/src/test/results/clientpositive/interval_3.q.out new file mode 100644 index 0000000..1404d86 --- /dev/null +++ b/ql/src/test/results/clientpositive/interval_3.q.out @@ -0,0 +1,102 @@ +PREHOOK: query: -- where clause +select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) - date '1992-01-01') < interval '365 0:0:0' day to second +order by l_orderkey +PREHOOK: type: QUERY +PREHOOK: Input: default@lineitem +#### A masked pattern was here #### +POSTHOOK: query: -- where clause +select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) - date '1992-01-01') < interval '365 0:0:0' day to second +order by l_orderkey +POSTHOOK: type: QUERY +POSTHOOK: Input: default@lineitem +#### A masked pattern was here #### +6 1992-04-27 1992-05-02 +37 1992-07-10 1992-08-02 +37 1992-07-02 1992-07-28 +37 1992-07-21 1992-08-15 +PREHOOK: query: select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01' +order by l_orderkey +PREHOOK: type: QUERY +PREHOOK: Input: default@lineitem +#### A masked pattern was here #### +POSTHOOK: query: select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01' +order by l_orderkey +POSTHOOK: type: QUERY +POSTHOOK: Input: default@lineitem +#### A masked pattern was here #### +6 1992-04-27 1992-05-02 +37 1992-07-10 1992-08-02 +37 1992-07-02 1992-07-28 +37 1992-07-21 1992-08-15 +PREHOOK: query: select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01' + and (cast(l_receiptdate as date) - cast(l_shipdate as date)) < interval '10' day +order by l_orderkey +PREHOOK: type: QUERY +PREHOOK: Input: default@lineitem +#### A masked pattern was here #### +POSTHOOK: query: select + l_orderkey, l_shipdate, l_receiptdate +from lineitem + where (cast(l_shipdate as date) + interval '1-0' year to month) <= date '1994-01-01' + and (cast(l_receiptdate as date) - cast(l_shipdate as date)) < interval '10' day +order by l_orderkey +POSTHOOK: type: QUERY +POSTHOOK: Input: default@lineitem +#### A masked pattern was here #### +6 1992-04-27 1992-05-02 +PREHOOK: query: -- joins +select + a.l_orderkey, b.l_orderkey, a.interval1 +from + ( + select + l_orderkey, l_shipdate, l_receiptdate, 
(cast(l_receiptdate as date) - cast(l_shipdate as date)) as interval1 + from lineitem + ) a + join + ( + select + l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - date '1992-07-02') as interval2 + from lineitem + ) b + on a.interval1 = b.interval2 and a.l_orderkey = b.l_orderkey +order by a.l_orderkey +PREHOOK: type: QUERY +PREHOOK: Input: default@lineitem +#### A masked pattern was here #### +POSTHOOK: query: -- joins +select + a.l_orderkey, b.l_orderkey, a.interval1 +from + ( + select + l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - cast(l_shipdate as date)) as interval1 + from lineitem + ) a + join + ( + select + l_orderkey, l_shipdate, l_receiptdate, (cast(l_receiptdate as date) - date '1992-07-02') as interval2 + from lineitem + ) b + on a.interval1 = b.interval2 and a.l_orderkey = b.l_orderkey +order by a.l_orderkey +POSTHOOK: type: QUERY +POSTHOOK: Input: default@lineitem +#### A masked pattern was here #### +37 37 26 00:00:00.000000000 diff --git a/ql/src/test/results/clientpositive/interval_arithmetic.q.out b/ql/src/test/results/clientpositive/interval_arithmetic.q.out new file mode 100644 index 0000000..0eb342c --- /dev/null +++ b/ql/src/test/results/clientpositive/interval_arithmetic.q.out @@ -0,0 +1,620 @@ +PREHOOK: query: create table interval_arithmetic_1 (dateval date, tsval timestamp) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@interval_arithmetic_1 +POSTHOOK: query: create table interval_arithmetic_1 (dateval date, tsval timestamp) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@interval_arithmetic_1 +PREHOOK: query: insert overwrite table interval_arithmetic_1 + select cast(ctimestamp1 as date), ctimestamp1 from alltypesorc +PREHOOK: type: QUERY +PREHOOK: Input: default@alltypesorc +PREHOOK: Output: default@interval_arithmetic_1 +POSTHOOK: query: insert overwrite table interval_arithmetic_1 + select cast(ctimestamp1 as date), ctimestamp1 from alltypesorc +POSTHOOK: type: QUERY +POSTHOOK: Input: default@alltypesorc +POSTHOOK: Output: default@interval_arithmetic_1 +POSTHOOK: Lineage: interval_arithmetic_1.dateval EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +POSTHOOK: Lineage: interval_arithmetic_1.tsval SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ] +PREHOOK: query: -- interval year-month arithmetic +explain +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: -- interval year-month arithmetic +explain +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dateval 
(type: date), (dateval - 2-2) (type: date), (dateval - -2-2) (type: date), (dateval + 2-2) (type: date), (dateval + -2-2) (type: date), (-2-2 + dateval) (type: date), (2-2 + dateval) (type: date) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +1970-01-01 1967-11-01 1972-03-01 1972-03-01 1967-11-01 1967-11-01 1972-03-01 +NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: explain +select + dateval, + dateval - date '1999-06-07', + date '1999-06-07' - dateval, + dateval - dateval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + dateval, + dateval - date '1999-06-07', + date '1999-06-07' - dateval, + dateval - dateval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dateval (type: date), (dateval - 1999-06-07) (type: interval_day_time), (1999-06-07 - dateval) (type: interval_day_time), (dateval - dateval) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select + dateval, + dateval - date '1999-06-07', + date '1999-06-07' - dateval, + dateval - dateval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: 
default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dateval, + dateval - date '1999-06-07', + date '1999-06-07' - dateval, + dateval - dateval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +1970-01-01 -10748 23:00:00.000000000 10748 23:00:00.000000000 0 00:00:00.000000000 +NULL NULL NULL NULL +PREHOOK: query: explain +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: tsval (type: timestamp), (tsval - 2-2) (type: timestamp), (tsval - -2-2) (type: timestamp), (tsval + 2-2) (type: timestamp), (tsval + -2-2) (type: timestamp), (-2-2 + tsval) (type: timestamp), (2-2 + tsval) (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +1969-12-31 15:59:46.674 1967-10-31 15:59:46.674 1972-02-29 15:59:46.674 1972-02-29 15:59:46.674 1967-10-31 15:59:46.674 1967-10-31 15:59:46.674 1972-02-29 15:59:46.674 +NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: explain +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from 
interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 5-5 (type: interval_year_month), -1-1 (type: interval_year_month) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12288 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + ListSink + +PREHOOK: query: select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +5-5 -1-1 +5-5 -1-1 +PREHOOK: query: -- interval day-time arithmetic +explain +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: -- interval day-time arithmetic +explain +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dateval (type: date), (dateval - 99 11:22:33.123456789) (type: timestamp), (dateval - -99 11:22:33.123456789) (type: timestamp), (dateval + 99 11:22:33.123456789) (type: timestamp), (dateval + -99 11:22:33.123456789) (type: timestamp), (-99 11:22:33.123456789 + dateval) (type: timestamp), (99 11:22:33.123456789 + dateval) (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input 
format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +1970-01-01 1969-09-23 13:37:26.876543211 1970-04-10 11:22:33.123456789 1970-04-10 11:22:33.123456789 1969-09-23 13:37:26.876543211 1969-09-23 13:37:26.876543211 1970-04-10 11:22:33.123456789 +NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: explain +select + dateval, + tsval, + dateval - tsval, + tsval - dateval, + tsval - tsval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + dateval, + tsval, + dateval - tsval, + tsval - dateval, + tsval - tsval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dateval (type: date), tsval (type: timestamp), (dateval - tsval) (type: interval_day_time), (tsval - dateval) (type: interval_day_time), (tsval - tsval) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select + dateval, + tsval, + dateval - tsval, + tsval - dateval, + tsval - tsval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dateval, + tsval, + dateval - tsval, + tsval - dateval, + tsval - tsval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### 
+1970-01-01 1969-12-31 15:59:46.674 0 08:00:12.326000000 -0 08:00:12.326000000 0 00:00:00.000000000 +NULL NULL NULL NULL NULL +PREHOOK: query: explain +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: tsval (type: timestamp), (tsval - 99 11:22:33.123456789) (type: timestamp), (tsval - -99 11:22:33.123456789) (type: timestamp), (tsval + 99 11:22:33.123456789) (type: timestamp), (tsval + -99 11:22:33.123456789) (type: timestamp), (-99 11:22:33.123456789 + tsval) (type: timestamp), (99 11:22:33.123456789 + tsval) (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 52 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + ListSink + +PREHOOK: query: select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +1969-12-31 15:59:46.674 1969-09-23 05:37:13.550543211 1970-04-10 03:22:19.797456789 1970-04-10 03:22:19.797456789 1969-09-23 05:37:13.550543211 
1969-09-23 05:37:13.550543211 1970-04-10 03:22:19.797456789 +NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: explain +select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 2 + Processor Tree: + TableScan + alias: interval_arithmetic_1 + Statistics: Num rows: 12288 Data size: 326837 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + expressions: 109 20:30:40.246913578 (type: interval_day_time), 89 02:14:26.000000000 (type: interval_day_time) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 12288 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + Limit + Number of rows: 2 + Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE + ListSink + +PREHOOK: query: select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic_1 +limit 2 +PREHOOK: type: QUERY +PREHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +POSTHOOK: query: select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic_1 +limit 2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@interval_arithmetic_1 +#### A masked pattern was here #### +109 20:30:40.246913578 89 02:14:26.000000000 +109 20:30:40.246913578 89 02:14:26.000000000 +PREHOOK: query: drop table interval_arithmetic_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@interval_arithmetic_1 +PREHOOK: Output: default@interval_arithmetic_1 +POSTHOOK: query: drop table interval_arithmetic_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@interval_arithmetic_1 +POSTHOOK: Output: default@interval_arithmetic_1 diff --git a/ql/src/test/results/clientpositive/interval_comparison.q.out b/ql/src/test/results/clientpositive/interval_comparison.q.out new file mode 100644 index 0000000..455808a --- /dev/null +++ b/ql/src/test/results/clientpositive/interval_comparison.q.out @@ -0,0 +1,188 @@ +PREHOOK: query: -- should all be true +select + i1 = i1, + i1 = i2, + i1 >= i2, + i1 <= i2, + i3 = i3, + i3 = i4, + i3 <= i4, + i3 >= i4, + i1 < i3, + i3 > i1, + i1 != i3 +from ( + select + interval '2-0' year to month as i1, + interval '2' year as i2, + interval '2-1' year to month as i3, + interval '25' month as i4 + from src limit 1 +) q1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- should all be true +select + i1 = i1, + i1 = i2, + i1 >= i2, + i1 <= i2, + i3 = i3, + i3 = i4, + i3 <= i4, + i3 >= i4, + i1 < i3, + i3 > i1, + i1 != i3 +from ( + select + interval '2-0' year to month as i1, + interval '2' year as i2, + interval '2-1' year to month as i3, + interval '25' month as i4 + from src limit 1 +) q1 +POSTHOOK: type: QUERY +POSTHOOK: 
Input: default@src +#### A masked pattern was here #### +true true true true true true true true true true true +PREHOOK: query: -- should all be false +select + i1 != i1, + i1 != i2, + i1 < i2, + i1 > i2, + i1 = i3, + i1 > i3, + i1 >= i3, + i3 < i1, + i3 <= i1 +from ( + select + interval '2-0' year to month as i1, + interval '2' year as i2, + interval '2-1' year to month as i3, + interval '25' month as i4 + from src limit 1 +) q1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- should all be false +select + i1 != i1, + i1 != i2, + i1 < i2, + i1 > i2, + i1 = i3, + i1 > i3, + i1 >= i3, + i3 < i1, + i3 <= i1 +from ( + select + interval '2-0' year to month as i1, + interval '2' year as i2, + interval '2-1' year to month as i3, + interval '25' month as i4 + from src limit 1 +) q1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +false false false false false false false false false +PREHOOK: query: -- should all be true +select + i1 = i1, + i1 = i2, + i1 >= i2, + i1 <= i2, + i3 = i3, + i3 = i4, + i3 <= i4, + i3 >= i4, + i1 < i3, + i3 > i1, + i1 != i3 +from ( + select + interval '1 0:0:0' day to second as i1, + interval '24' hour as i2, + interval '1 0:0:1' day to second as i3, + interval '86401' second as i4 + from src limit 1 +) q1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- should all be true +select + i1 = i1, + i1 = i2, + i1 >= i2, + i1 <= i2, + i3 = i3, + i3 = i4, + i3 <= i4, + i3 >= i4, + i1 < i3, + i3 > i1, + i1 != i3 +from ( + select + interval '1 0:0:0' day to second as i1, + interval '24' hour as i2, + interval '1 0:0:1' day to second as i3, + interval '86401' second as i4 + from src limit 1 +) q1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +true true true true true true true true true true true +PREHOOK: query: -- should all be false +select + i1 != i1, + i1 != i2, + i1 < i2, + i1 > i2, + i1 = i3, + i1 > i3, + i1 >= i3, + i3 < i1, + i3 <= i1 +from ( + select + interval '1 0:0:0' day to second as i1, + interval '24' hour as i2, + interval '1 0:0:1' day to second as i3, + interval '86401' second as i4 + from src limit 1 +) q1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: -- should all be false +select + i1 != i1, + i1 != i2, + i1 < i2, + i1 > i2, + i1 = i3, + i1 > i3, + i1 >= i3, + i3 < i1, + i3 <= i1 +from ( + select + interval '1 0:0:0' day to second as i1, + interval '24' hour as i2, + interval '1 0:0:1' day to second as i3, + interval '86401' second as i4 + from src limit 1 +) q1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +false false false false false false false false false diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift index 2db27b5..d828bac 100644 --- a/serde/if/serde.thrift +++ b/serde/if/serde.thrift @@ -60,6 +60,8 @@ const string DATETIME_TYPE_NAME = "datetime"; const string TIMESTAMP_TYPE_NAME = "timestamp"; const string DECIMAL_TYPE_NAME = "decimal"; const string BINARY_TYPE_NAME = "binary"; +const string INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month"; +const string INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time"; const string LIST_TYPE_NAME = "array"; const string MAP_TYPE_NAME = "map"; @@ -71,7 +73,27 @@ const string LIST_COLUMN_TYPES = "columns.types"; const string TIMESTAMP_FORMATS = "timestamp.formats"; -const set PrimitiveTypes = [ 
VOID_TYPE_NAME BOOLEAN_TYPE_NAME TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME FLOAT_TYPE_NAME DOUBLE_TYPE_NAME STRING_TYPE_NAME VARCHAR_TYPE_NAME CHAR_TYPE_NAME DATE_TYPE_NAME DATETIME_TYPE_NAME TIMESTAMP_TYPE_NAME DECIMAL_TYPE_NAME BINARY_TYPE_NAME], +const set PrimitiveTypes = [ + VOID_TYPE_NAME + BOOLEAN_TYPE_NAME + TINYINT_TYPE_NAME + SMALLINT_TYPE_NAME + INT_TYPE_NAME + BIGINT_TYPE_NAME + FLOAT_TYPE_NAME + DOUBLE_TYPE_NAME + STRING_TYPE_NAME + VARCHAR_TYPE_NAME + CHAR_TYPE_NAME + DATE_TYPE_NAME + DATETIME_TYPE_NAME + TIMESTAMP_TYPE_NAME + INTERVAL_YEAR_MONTH_TYPE_NAME + INTERVAL_DAY_TIME_TYPE_NAME + DECIMAL_TYPE_NAME + BINARY_TYPE_NAME +], + const set CollectionTypes = [ LIST_TYPE_NAME MAP_TYPE_NAME ], const set IntegralTypes = [ TINYINT_TYPE_NAME SMALLINT_TYPE_NAME INT_TYPE_NAME BIGINT_TYPE_NAME ], diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp index cc71b65..5f8873b 100644 --- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp +++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp @@ -77,6 +77,10 @@ serdeConstants::serdeConstants() { BINARY_TYPE_NAME = "binary"; + INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month"; + + INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time"; + LIST_TYPE_NAME = "array"; MAP_TYPE_NAME = "map"; @@ -105,6 +109,8 @@ serdeConstants::serdeConstants() { PrimitiveTypes.insert("date"); PrimitiveTypes.insert("datetime"); PrimitiveTypes.insert("timestamp"); + PrimitiveTypes.insert("interval_year_month"); + PrimitiveTypes.insert("interval_day_time"); PrimitiveTypes.insert("decimal"); PrimitiveTypes.insert("binary"); diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h index 418f666..4f2018b 100644 --- a/serde/src/gen/thrift/gen-cpp/serde_constants.h +++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h @@ -48,6 +48,8 @@ class serdeConstants { std::string TIMESTAMP_TYPE_NAME; std::string DECIMAL_TYPE_NAME; std::string BINARY_TYPE_NAME; + std::string INTERVAL_YEAR_MONTH_TYPE_NAME; + std::string INTERVAL_DAY_TIME_TYPE_NAME; std::string LIST_TYPE_NAME; std::string MAP_TYPE_NAME; std::string STRUCT_TYPE_NAME; diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java index e70d0c4..5f3001d 100644 --- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java +++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java @@ -99,6 +99,10 @@ public static final String BINARY_TYPE_NAME = "binary"; + public static final String INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month"; + + public static final String INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time"; + public static final String LIST_TYPE_NAME = "array"; public static final String MAP_TYPE_NAME = "map"; @@ -129,6 +133,8 @@ PrimitiveTypes.add("date"); PrimitiveTypes.add("datetime"); PrimitiveTypes.add("timestamp"); + PrimitiveTypes.add("interval_year_month"); + PrimitiveTypes.add("interval_day_time"); PrimitiveTypes.add("decimal"); PrimitiveTypes.add("binary"); } diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php index c1d8085..653662a 100644 --- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php +++ b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php @@ -82,6 +82,10 
@@ $GLOBALS['serde_CONSTANTS']['DECIMAL_TYPE_NAME'] = "decimal"; $GLOBALS['serde_CONSTANTS']['BINARY_TYPE_NAME'] = "binary"; +$GLOBALS['serde_CONSTANTS']['INTERVAL_YEAR_MONTH_TYPE_NAME'] = "interval_year_month"; + +$GLOBALS['serde_CONSTANTS']['INTERVAL_DAY_TIME_TYPE_NAME'] = "interval_day_time"; + $GLOBALS['serde_CONSTANTS']['LIST_TYPE_NAME'] = "array"; $GLOBALS['serde_CONSTANTS']['MAP_TYPE_NAME'] = "map"; @@ -111,6 +115,8 @@ $GLOBALS['serde_CONSTANTS']['PrimitiveTypes'] = array( "date" => true, "datetime" => true, "timestamp" => true, + "interval_year_month" => true, + "interval_day_time" => true, "decimal" => true, "binary" => true, ); diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py index 7d7608f..fa0edc7 100644 --- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py +++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py @@ -42,6 +42,8 @@ TIMESTAMP_TYPE_NAME = "timestamp" DECIMAL_TYPE_NAME = "decimal" BINARY_TYPE_NAME = "binary" +INTERVAL_YEAR_MONTH_TYPE_NAME = "interval_year_month" +INTERVAL_DAY_TIME_TYPE_NAME = "interval_day_time" LIST_TYPE_NAME = "array" MAP_TYPE_NAME = "map" STRUCT_TYPE_NAME = "struct" @@ -64,6 +66,8 @@ "date", "datetime", "timestamp", + "interval_year_month", + "interval_day_time", "decimal", "binary", ]) diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb index 33f12b0..1c893c1 100644 --- a/serde/src/gen/thrift/gen-rb/serde_constants.rb +++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb @@ -73,6 +73,10 @@ DECIMAL_TYPE_NAME = %q"decimal" BINARY_TYPE_NAME = %q"binary" +INTERVAL_YEAR_MONTH_TYPE_NAME = %q"interval_year_month" + +INTERVAL_DAY_TIME_TYPE_NAME = %q"interval_day_time" + LIST_TYPE_NAME = %q"array" MAP_TYPE_NAME = %q"map" @@ -102,6 +106,8 @@ PrimitiveTypes = Set.new([ %q"date", %q"datetime", %q"timestamp", + %q"interval_year_month", + %q"interval_day_time", %q"decimal", %q"binary", ]) diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java index 2b7fba6..ccdb820 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java @@ -32,6 +32,8 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.ByteStream; @@ -45,6 +47,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -64,6 +68,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; @@ -244,11 +250,7 @@ static Object deserialize(InputByteBuffer buffer, TypeInfo type, case LONG: { LongWritable r = reuse == null ? new LongWritable() : (LongWritable) reuse; - long v = buffer.read(invert) ^ 0x80; - for (int i = 0; i < 7; i++) { - v = (v << 8) + (buffer.read(invert) & 0xff); - } - r.set(v); + r.set(deserializeLong(buffer, invert)); return r; } case FLOAT: { @@ -374,6 +376,22 @@ static Object deserialize(InputByteBuffer buffer, TypeInfo type, t.setBinarySortable(bytes, 0); return t; + case INTERVAL_YEAR_MONTH: { + HiveIntervalYearMonthWritable i = reuse == null ? new HiveIntervalYearMonthWritable() + : (HiveIntervalYearMonthWritable) reuse; + i.set(deserializeInt(buffer, invert)); + return i; + } + + case INTERVAL_DAY_TIME: { + HiveIntervalDayTimeWritable i = reuse == null ? new HiveIntervalDayTimeWritable() + : (HiveIntervalDayTimeWritable) reuse; + long totalSecs = deserializeLong(buffer, invert); + int nanos = deserializeInt(buffer, invert); + i.set(totalSecs, nanos); + return i; + } + case DECIMAL: { // See serialization of decimal for explanation (below) @@ -542,6 +560,14 @@ private static int deserializeInt(InputByteBuffer buffer, boolean invert) throws return v; } + private static long deserializeLong(InputByteBuffer buffer, boolean invert) throws IOException { + long v = buffer.read(invert) ^ 0x80; + for (int i = 0; i < 7; i++) { + v = (v << 8) + (buffer.read(invert) & 0xff); + } + return v; + } + static int getCharacterMaxLength(TypeInfo type) { return ((BaseCharTypeInfo)type).getLength(); } @@ -661,14 +687,7 @@ static void serialize(ByteStream.Output buffer, Object o, ObjectInspector oi, case LONG: { LongObjectInspector loi = (LongObjectInspector) poi; long v = loi.get(o); - writeByte(buffer, (byte) ((v >> 56) ^ 0x80), invert); - writeByte(buffer, (byte) (v >> 48), invert); - writeByte(buffer, (byte) (v >> 40), invert); - writeByte(buffer, (byte) (v >> 32), invert); - writeByte(buffer, (byte) (v >> 24), invert); - writeByte(buffer, (byte) (v >> 16), invert); - writeByte(buffer, (byte) (v >> 8), invert); - writeByte(buffer, (byte) v, invert); + serializeLong(buffer, v, invert); return; } case FLOAT: { @@ -755,6 +774,22 @@ static void serialize(ByteStream.Output buffer, Object o, ObjectInspector oi, } return; } + case INTERVAL_YEAR_MONTH: { + HiveIntervalYearMonthObjectInspector ioi = (HiveIntervalYearMonthObjectInspector) poi; + HiveIntervalYearMonth intervalYearMonth = ioi.getPrimitiveJavaObject(o); + int totalMonths = intervalYearMonth.getTotalMonths(); + serializeInt(buffer, totalMonths, invert); + return; + } + case INTERVAL_DAY_TIME: { + HiveIntervalDayTimeObjectInspector ioi = (HiveIntervalDayTimeObjectInspector) poi; + HiveIntervalDayTime intervalDayTime = ioi.getPrimitiveJavaObject(o); + long totalSecs = intervalDayTime.getTotalSeconds(); + int nanos = intervalDayTime.getNanos(); + serializeLong(buffer, totalSecs, invert); + 
serializeInt(buffer, nanos, invert); + return; + } case DECIMAL: { // decimals are encoded in three pieces: // sign: 1, 2 or 3 for smaller, equal or larger than 0 respectively @@ -877,6 +912,17 @@ private static void serializeInt(ByteStream.Output buffer, int v, boolean invert writeByte(buffer, (byte) v, invert); } + private static void serializeLong(ByteStream.Output buffer, long v, boolean invert) { + writeByte(buffer, (byte) ((v >> 56) ^ 0x80), invert); + writeByte(buffer, (byte) (v >> 48), invert); + writeByte(buffer, (byte) (v >> 40), invert); + writeByte(buffer, (byte) (v >> 32), invert); + writeByte(buffer, (byte) (v >> 24), invert); + writeByte(buffer, (byte) (v >> 16), invert); + writeByte(buffer, (byte) (v >> 8), invert); + writeByte(buffer, (byte) v, invert); + } + @Override public SerDeStats getSerDeStats() { // no support for statistics diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java new file mode 100644 index 0000000..02d0ce6 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalDayTimeWritable.java @@ -0,0 +1,124 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
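The serializeLong/deserializeLong helpers factored out of the LONG case above write the value big-endian with the sign bit flipped, so unsigned byte-wise comparison of the serialized bytes agrees with signed long ordering; the INTERVAL_DAY_TIME case reuses them for the total-seconds field. A self-contained sketch of that property (class and method names here are illustrative, not part of the patch):

    // Order-preserving long encoding, as in BinarySortableSerDe.serializeLong:
    // big-endian bytes with the sign bit of the first byte flipped.
    public class SortableLongSketch {
      static byte[] encode(long v) {
        byte[] b = new byte[8];
        for (int i = 0; i < 8; i++) {
          b[i] = (byte) (v >> (56 - 8 * i)); // most significant byte first
        }
        b[0] ^= 0x80; // flip sign bit so negative values sort before positive
        return b;
      }

      static int compareUnsigned(byte[] a, byte[] b) {
        for (int i = 0; i < 8; i++) {
          int cmp = (a[i] & 0xff) - (b[i] & 0xff);
          if (cmp != 0) {
            return cmp;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        long[] vals = {Long.MIN_VALUE, -86400L, -1L, 0L, 1L, 86400L, Long.MAX_VALUE};
        for (int i = 1; i < vals.length; i++) {
          // Unsigned byte order agrees with signed numeric order: prints true.
          System.out.println(vals[i - 1] + " < " + vals[i] + " : "
              + (compareUnsigned(encode(vals[i - 1]), encode(vals[i])) < 0));
        }
      }
    }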
+ */ + +package org.apache.hadoop.hive.serde2.io; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.MutableHiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VLong; +import org.apache.hadoop.io.WritableComparable; +import org.apache.hadoop.io.WritableUtils; + +public class HiveIntervalDayTimeWritable + implements WritableComparable { + + static final private Log LOG = LogFactory.getLog(HiveIntervalDayTimeWritable.class); + + protected MutableHiveIntervalDayTime intervalValue = new MutableHiveIntervalDayTime(); + + public HiveIntervalDayTimeWritable() { + } + + public HiveIntervalDayTimeWritable(HiveIntervalDayTime value) { + intervalValue.setValue(value); + } + + public HiveIntervalDayTimeWritable(HiveIntervalDayTimeWritable writable) { + intervalValue.setValue(writable.intervalValue); + } + + public void set(int days, int hours, int minutes, int seconds, int nanos) { + intervalValue.setValue(days, hours, minutes, seconds, nanos); + } + + public void set(HiveIntervalDayTime value) { + intervalValue.setValue(value); + } + + public void set(HiveIntervalDayTimeWritable writable) { + intervalValue.setValue(writable.intervalValue); + } + + public void set(long totalSeconds, int nanos) { + intervalValue.setValue(totalSeconds, nanos); + } + + public HiveIntervalDayTime getHiveIntervalDayTime() { + return new HiveIntervalDayTime(intervalValue); + } + + @Override + public void readFields(DataInput in) throws IOException { + // read totalSeconds, nanos from DataInput + set(WritableUtils.readVLong(in), WritableUtils.readVInt(in)); + } + + @Override + public void write(DataOutput out) throws IOException { + // write totalSeconds, nanos to DataOutput + WritableUtils.writeVLong(out, intervalValue.getTotalSeconds()); + WritableUtils.writeVInt(out, intervalValue.getNanos()); + } + + public void writeToByteStream(RandomAccessOutput byteStream) { + LazyBinaryUtils.writeVLong(byteStream, intervalValue.getTotalSeconds()); + LazyBinaryUtils.writeVInt(byteStream, intervalValue.getNanos()); + } + + public void setFromBytes(byte[] bytes, int offset, int length, VInt vInt, VLong vLong) { + LazyBinaryUtils.readVLong(bytes, offset, vLong); + LazyBinaryUtils.readVInt(bytes, offset + vLong.length, vInt); + assert (length == (vInt.length + vLong.length)); + set(vLong.value, vInt.value); + } + + @Override + public int compareTo(HiveIntervalDayTimeWritable other) { + return this.intervalValue.compareTo(other.intervalValue); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof HiveIntervalDayTimeWritable)) { + return false; + } + return 0 == compareTo((HiveIntervalDayTimeWritable) obj); + } + + @Override + public int hashCode() { + return intervalValue.hashCode(); + } + + @Override + public String toString() { + return intervalValue.toString(); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java new file mode 100644 index 0000000..c75ee57 --- /dev/null +++ 
b/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveIntervalYearMonthWritable.java @@ -0,0 +1,120 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.io; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.MutableHiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; +import org.apache.hadoop.io.WritableComparable; +import org.apache.hadoop.io.WritableUtils; + +public class HiveIntervalYearMonthWritable + implements WritableComparable { + + static final private Log LOG = LogFactory.getLog(HiveIntervalYearMonthWritable.class); + + protected MutableHiveIntervalYearMonth intervalValue = new MutableHiveIntervalYearMonth(); + + public HiveIntervalYearMonthWritable() { + } + + public HiveIntervalYearMonthWritable(HiveIntervalYearMonth hiveInterval) { + intervalValue.setValue(hiveInterval); + } + + public HiveIntervalYearMonthWritable(HiveIntervalYearMonthWritable hiveIntervalWritable) { + intervalValue.setValue(hiveIntervalWritable.intervalValue); + } + + public void set(int years, int months) { + intervalValue.setValue(years, months); + } + + public void set(HiveIntervalYearMonth hiveInterval) { + intervalValue.setValue(hiveInterval); + } + + public void set(HiveIntervalYearMonthWritable hiveIntervalWritable) { + intervalValue.setValue(hiveIntervalWritable.intervalValue); + } + + public void set(int totalMonths) { + intervalValue.setValue(totalMonths); + } + + public HiveIntervalYearMonth getHiveIntervalYearMonth() { + return new HiveIntervalYearMonth(intervalValue); + } + + @Override + public void readFields(DataInput in) throws IOException { + // read totalMonths from DataInput + set(WritableUtils.readVInt(in)); + } + + @Override + public void write(DataOutput out) throws IOException { + // write totalMonths to DataOutput + WritableUtils.writeVInt(out, intervalValue.getTotalMonths()); + } + + public void writeToByteStream(RandomAccessOutput byteStream) { + LazyBinaryUtils.writeVInt(byteStream, intervalValue.getTotalMonths()); + } + + public void setFromBytes(byte[] bytes, int offset, int length, VInt vInt) { + LazyBinaryUtils.readVInt(bytes, offset, vInt); + assert (length == vInt.length); + set(vInt.value); + } + + @Override + public int compareTo(HiveIntervalYearMonthWritable other) { + return this.intervalValue.compareTo(other.intervalValue); + } + + @Override + public boolean equals(Object obj) 
{ + if (this == obj) { + return true; + } + if (!(obj instanceof HiveIntervalYearMonthWritable)) { + return false; + } + return 0 == compareTo((HiveIntervalYearMonthWritable) obj); + } + + @Override + public int hashCode() { + return intervalValue.hashCode(); + } + + @Override + public String toString() { + return intervalValue.toString(); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java index 864d9aa..7aa0fc7 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java @@ -35,6 +35,8 @@ import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyFloatObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveIntervalYearMonthObjectInspector; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveIntervalDayTimeObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector; import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyLongObjectInspector; @@ -125,6 +127,10 @@ return new LazyDate((LazyDateObjectInspector) oi); case TIMESTAMP: return new LazyTimestamp((LazyTimestampObjectInspector) oi); + case INTERVAL_YEAR_MONTH: + return new LazyHiveIntervalYearMonth((LazyHiveIntervalYearMonthObjectInspector) oi); + case INTERVAL_DAY_TIME: + return new LazyHiveIntervalDayTime((LazyHiveIntervalDayTimeObjectInspector) oi); case BINARY: return new LazyBinary((LazyBinaryObjectInspector) oi); case DECIMAL: diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalDayTime.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalDayTime.java new file mode 100644 index 0000000..066bb48 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalDayTime.java @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
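HiveIntervalYearMonthWritable above serializes as a single VInt of total months, which is also why interval '2-1' year to month and interval '25' month compare equal in interval_comparison.q.out. A hedged round-trip sketch using only the set/write/readFields methods shown above (the class name is hypothetical):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;

    // Round-trips a year-month interval through its single-VInt wire form.
    public class YearMonthWritableRoundTrip {
      public static void main(String[] args) throws IOException {
        HiveIntervalYearMonthWritable w = new HiveIntervalYearMonthWritable();
        w.set(2, 1); // 2 years + 1 month = 25 total months

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        w.write(new DataOutputStream(bos));

        HiveIntervalYearMonthWritable r = new HiveIntervalYearMonthWritable();
        r.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(w.equals(r)); // true
        System.out.println(r);           // 2-1 (years-months form)
      }
    }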
+ */ + +package org.apache.hadoop.hive.serde2.lazy; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.io.Text; + +public class LazyHiveIntervalDayTime + extends LazyPrimitive { + + public LazyHiveIntervalDayTime(LazyHiveIntervalDayTimeObjectInspector oi) { + super(oi); + data = new HiveIntervalDayTimeWritable(); + } + + public LazyHiveIntervalDayTime(LazyHiveIntervalDayTime copy) { + super(copy); + data = new HiveIntervalDayTimeWritable(copy.data); + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + String s = null; + try { + s = Text.decode(bytes.getData(), start, length); + data.set(HiveIntervalDayTime.valueOf(s)); + isNull = false; + } catch (Exception e) { + isNull = true; + logExceptionMessage(bytes, start, length, "INTERVAL_DAY_TIME"); + } + } + + public static void writeUTF8(OutputStream out, HiveIntervalDayTimeWritable i) throws IOException { + ByteBuffer b = Text.encode(i.toString()); + out.write(b.array(), 0, b.limit()); + } + + @Override + public HiveIntervalDayTimeWritable getWritableObject() { + return data; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalYearMonth.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalYearMonth.java new file mode 100644 index 0000000..c21dc14 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyHiveIntervalYearMonth.java @@ -0,0 +1,66 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
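In the text serde path above, intervals round-trip through their string form: init() parses the UTF-8 bytes with HiveIntervalDayTime.valueOf, and writeUTF8 emits toString(). A hedged usage sketch of that valueOf/toString pair (the class name is hypothetical):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;

    // Parses the same day-to-second literal body used in the tests above.
    public class LazyIntervalRoundTrip {
      public static void main(String[] args) {
        HiveIntervalDayTime i = HiveIntervalDayTime.valueOf("99 11:22:33.123456789");
        // 99*86400 + 11*3600 + 22*60 + 33 = 8594553 total seconds
        System.out.println(i.getTotalSeconds()); // 8594553
        System.out.println(i.getNanos());        // 123456789
        System.out.println(i);                   // 99 11:22:33.123456789
      }
    }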
+ */ + +package org.apache.hadoop.hive.serde2.lazy; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyHiveIntervalYearMonthObjectInspector; +import org.apache.hadoop.io.Text; + +public class LazyHiveIntervalYearMonth + extends LazyPrimitive { + + public LazyHiveIntervalYearMonth(LazyHiveIntervalYearMonthObjectInspector oi) { + super(oi); + data = new HiveIntervalYearMonthWritable(); + } + + public LazyHiveIntervalYearMonth(LazyHiveIntervalYearMonth copy) { + super(copy); + data = new HiveIntervalYearMonthWritable(copy.data); + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + String s = null; + try { + s = Text.decode(bytes.getData(), start, length); + data.set(HiveIntervalYearMonth.valueOf(s)); + isNull = false; + } catch (Exception e) { + isNull = true; + logExceptionMessage(bytes, start, length, "INTERVAL_YEAR_MONTH"); + } + } + + public static void writeUTF8(OutputStream out, HiveIntervalYearMonthWritable i) + throws IOException { + ByteBuffer b = Text.encode(i.toString()); + out.write(b.array(), 0, b.limit()); + } + + @Override + public HiveIntervalYearMonthWritable getWritableObject() { + return data; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java index 879743f..ea344b3 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyUtils.java @@ -39,6 +39,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; @@ -254,6 +256,16 @@ public static void writePrimitiveUTF8(OutputStream out, Object o, ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o)); break; } + case INTERVAL_YEAR_MONTH: { + LazyHiveIntervalYearMonth.writeUTF8(out, + ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o)); + break; + } + case INTERVAL_DAY_TIME: { + LazyHiveIntervalDayTime.writeUTF8(out, + ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o)); + break; + } case DECIMAL: { LazyHiveDecimal.writeUTF8(out, ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o)); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalDayTimeObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalDayTimeObjectInspector.java new file mode 100644 index 0000000..6da4fde --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalDayTimeObjectInspector.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache 
Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.lazy.LazyHiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; + +public class LazyHiveIntervalDayTimeObjectInspector + extends AbstractPrimitiveLazyObjectInspector + implements HiveIntervalDayTimeObjectInspector{ + + LazyHiveIntervalDayTimeObjectInspector() { + super(TypeInfoFactory.intervalDayTimeTypeInfo); + } + + @Override + public Object copyObject(Object o) { + return o == null ? null : new LazyHiveIntervalDayTime((LazyHiveIntervalDayTime) o); + } + + @Override + public HiveIntervalDayTime getPrimitiveJavaObject(Object o) { + return o == null ? null : ((LazyHiveIntervalDayTime) o).getWritableObject().getHiveIntervalDayTime(); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalYearMonthObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalYearMonthObjectInspector.java new file mode 100644 index 0000000..cf3db06 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyHiveIntervalYearMonthObjectInspector.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.lazy.LazyHiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; + +public class LazyHiveIntervalYearMonthObjectInspector + extends AbstractPrimitiveLazyObjectInspector + implements HiveIntervalYearMonthObjectInspector{ + + LazyHiveIntervalYearMonthObjectInspector() { + super(TypeInfoFactory.intervalYearMonthTypeInfo); + } + + @Override + public Object copyObject(Object o) { + return o == null ? null : new LazyHiveIntervalYearMonth((LazyHiveIntervalYearMonth) o); + } + + @Override + public HiveIntervalYearMonth getPrimitiveJavaObject(Object o) { + return o == null ? null : ((LazyHiveIntervalYearMonth) o).getWritableObject().getHiveIntervalYearMonth(); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java index 7423c00..5b52768 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/primitive/LazyPrimitiveObjectInspectorFactory.java @@ -72,6 +72,10 @@ new LazyDateObjectInspector(); public static final LazyTimestampObjectInspector LAZY_TIMESTAMP_OBJECT_INSPECTOR = new LazyTimestampObjectInspector(); + public static final LazyHiveIntervalYearMonthObjectInspector LAZY_INTERVAL_YEAR_MONTH_OBJECT_INSPECTOR = + new LazyHiveIntervalYearMonthObjectInspector(); + public static final LazyHiveIntervalDayTimeObjectInspector LAZY_INTERVAL_DAY_TIME_OBJECT_INSPECTOR = + new LazyHiveIntervalDayTimeObjectInspector(); public static final LazyBinaryObjectInspector LAZY_BINARY_OBJECT_INSPECTOR = new LazyBinaryObjectInspector(); @@ -108,6 +112,10 @@ private LazyPrimitiveObjectInspectorFactory() { LAZY_DATE_OBJECT_INSPECTOR); cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME), LAZY_TIMESTAMP_OBJECT_INSPECTOR); + cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME), + LAZY_INTERVAL_YEAR_MONTH_OBJECT_INSPECTOR); + cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME), + LAZY_INTERVAL_DAY_TIME_OBJECT_INSPECTOR); cachedPrimitiveLazyObjectInspectors.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME), LAZY_BINARY_OBJECT_INSPECTOR); } diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java index 8bd5838..52f3527 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryFactory.java @@ -29,6 +29,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector; @@ -84,6 +86,10 @@ return new LazyBinaryDate((WritableDateObjectInspector) oi); case TIMESTAMP: return new LazyBinaryTimestamp((WritableTimestampObjectInspector) oi); + case INTERVAL_YEAR_MONTH: + return new LazyBinaryHiveIntervalYearMonth((WritableHiveIntervalYearMonthObjectInspector) oi); + case INTERVAL_DAY_TIME: + return new LazyBinaryHiveIntervalDayTime((WritableHiveIntervalDayTimeObjectInspector) oi); case BINARY: return new LazyBinaryBinary((WritableBinaryObjectInspector) oi); case DECIMAL: diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java new file mode 100644 index 0000000..dda2b46 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalDayTime.java @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.lazybinary; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VLong; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector; + +/** + * LazyBinaryHiveIntervalDayTime + * A LazyBinaryObject that encodes a HiveIntervalDayTime + */ +public class LazyBinaryHiveIntervalDayTime extends + LazyBinaryPrimitive{ + static final Log LOG = LogFactory.getLog(LazyBinaryHiveIntervalDayTime.class); + + /** + * Reusable member for decoding integer. 
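+   * (A VLong is reused as well: the day-time interval wire form is a VLong of
+   * total seconds followed by a VInt of nanos, as read by setFromBytes.)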
+ */ + VInt vInt = new LazyBinaryUtils.VInt(); + VLong vLong = new LazyBinaryUtils.VLong(); + + LazyBinaryHiveIntervalDayTime(WritableHiveIntervalDayTimeObjectInspector oi) { + super(oi); + data = new HiveIntervalDayTimeWritable(); + } + + LazyBinaryHiveIntervalDayTime(LazyBinaryHiveIntervalDayTime copy) { + super(copy); + data = new HiveIntervalDayTimeWritable(copy.data); + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + data.setFromBytes(bytes.getData(), start, length, vInt, vLong); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java new file mode 100644 index 0000000..426bb7a --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryHiveIntervalYearMonth.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.lazybinary; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; +import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector; + +/** + * LazyBinaryHiveIntervalYearMonth + * A LazyBinaryObject that encodes a HiveIntervalYearMonth + */ +public class LazyBinaryHiveIntervalYearMonth extends + LazyBinaryPrimitive{ + static final Log LOG = LogFactory.getLog(LazyBinaryHiveIntervalYearMonth.class); + + /** + * Reusable member for decoding integer. 
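+   * (Year-month intervals decode a single VInt of total months, so only a
+   * VInt member is needed here.)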
+ */ + VInt vInt = new LazyBinaryUtils.VInt(); + + LazyBinaryHiveIntervalYearMonth(WritableHiveIntervalYearMonthObjectInspector oi) { + super(oi); + data = new HiveIntervalYearMonthWritable(); + } + + LazyBinaryHiveIntervalYearMonth(LazyBinaryHiveIntervalYearMonth copy) { + super(copy); + data = new HiveIntervalYearMonthWritable(copy.data); + } + + @Override + public void init(ByteArrayRef bytes, int start, int length) { + data.setFromBytes(bytes.getData(), start, length, vInt); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java index 62cba01..3d14fbe 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java @@ -36,6 +36,8 @@ import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; @@ -54,6 +56,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; @@ -428,6 +432,20 @@ public static void serialize(RandomAccessOutput byteStream, Object obj, return; } + case INTERVAL_YEAR_MONTH: { + HiveIntervalYearMonthWritable intervalYearMonth = + ((HiveIntervalYearMonthObjectInspector) poi).getPrimitiveWritableObject(obj); + intervalYearMonth.writeToByteStream(byteStream); + return; + } + + case INTERVAL_DAY_TIME: { + HiveIntervalDayTimeWritable intervalDayTime = + ((HiveIntervalDayTimeObjectInspector) poi).getPrimitiveWritableObject(obj); + intervalDayTime.writeToByteStream(byteStream); + return; + } + case DECIMAL: { HiveDecimalObjectInspector bdoi = (HiveDecimalObjectInspector) poi; HiveDecimalWritable t = bdoi.getPrimitiveWritableObject(obj); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java index 11e8cf4..8b2cbcc 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java @@ -210,6 +210,16 @@ public static void checkObjectByteInfo(ObjectInspector objectInspector, recordInfo.elementOffset = 0; recordInfo.elementSize = TimestampWritable.getTotalLength(bytes, offset); break; + case INTERVAL_YEAR_MONTH: + recordInfo.elementOffset = 0; + recordInfo.elementSize = WritableUtils.decodeVIntSize(bytes[offset]); + break; + case INTERVAL_DAY_TIME: + 
recordInfo.elementOffset = 0; + int secondsSize = WritableUtils.decodeVIntSize(bytes[offset]); + int nanosSize = WritableUtils.decodeVIntSize(bytes[offset + secondsSize]); + recordInfo.elementSize = secondsSize + nanosSize; + break; case DECIMAL: // using vint instead of 4 bytes LazyBinaryUtils.readVInt(bytes, offset, vInt); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java index 2a47d97..8a7c4a5 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorConverters.java @@ -33,6 +33,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; @@ -120,6 +122,14 @@ private static Converter getConverter(PrimitiveObjectInspector inputOI, return new PrimitiveObjectInspectorConverter.TimestampConverter( inputOI, (SettableTimestampObjectInspector) outputOI); + case INTERVAL_YEAR_MONTH: + return new PrimitiveObjectInspectorConverter.HiveIntervalYearMonthConverter( + inputOI, + (SettableHiveIntervalYearMonthObjectInspector) outputOI); + case INTERVAL_DAY_TIME: + return new PrimitiveObjectInspectorConverter.HiveIntervalDayTimeConverter( + inputOI, + (SettableHiveIntervalDayTimeObjectInspector) outputOI); case BINARY: return new PrimitiveObjectInspectorConverter.BinaryConverter( inputOI, diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java index 5e67a07..15778af 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java @@ -34,6 +34,8 @@ import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; @@ -47,6 +49,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector; +import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector; @@ -60,6 +64,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; @@ -542,6 +548,14 @@ public static int hashCode(Object o, ObjectInspector objIns) { TimestampWritable t = ((TimestampObjectInspector) poi) .getPrimitiveWritableObject(o); return t.hashCode(); + case INTERVAL_YEAR_MONTH: + HiveIntervalYearMonthWritable intervalYearMonth = ((HiveIntervalYearMonthObjectInspector) poi) + .getPrimitiveWritableObject(o); + return intervalYearMonth.hashCode(); + case INTERVAL_DAY_TIME: + HiveIntervalDayTimeWritable intervalDayTime = ((HiveIntervalDayTimeObjectInspector) poi) + .getPrimitiveWritableObject(o); + return intervalDayTime.hashCode(); case DECIMAL: return ((HiveDecimalObjectInspector) poi).getPrimitiveWritableObject(o).hashCode(); @@ -758,6 +772,20 @@ public static int compare(Object o1, ObjectInspector oi1, Object o2, .getPrimitiveWritableObject(o2); return t1.compareTo(t2); } + case INTERVAL_YEAR_MONTH: { + HiveIntervalYearMonthWritable i1 = ((HiveIntervalYearMonthObjectInspector) poi1) + .getPrimitiveWritableObject(o1); + HiveIntervalYearMonthWritable i2 = ((HiveIntervalYearMonthObjectInspector) poi2) + .getPrimitiveWritableObject(o2); + return i1.compareTo(i2); + } + case INTERVAL_DAY_TIME: { + HiveIntervalDayTimeWritable i1 = ((HiveIntervalDayTimeObjectInspector) poi1) + .getPrimitiveWritableObject(o1); + HiveIntervalDayTimeWritable i2 = ((HiveIntervalDayTimeObjectInspector) poi2) + .getPrimitiveWritableObject(o2); + return i1.compareTo(i2); + } case DECIMAL: { HiveDecimalWritable t1 = ((HiveDecimalObjectInspector) poi1) .getPrimitiveWritableObject(o1); @@ -1092,6 +1120,10 @@ private static boolean isInstanceOfSettablePrimitiveOI(PrimitiveObjectInspector return oi instanceof SettableDateObjectInspector; case TIMESTAMP: return oi instanceof SettableTimestampObjectInspector; + case INTERVAL_YEAR_MONTH: + return oi instanceof SettableHiveIntervalYearMonthObjectInspector; + case INTERVAL_DAY_TIME: + return oi instanceof SettableHiveIntervalDayTimeObjectInspector; case BINARY: return oi instanceof SettableBinaryObjectInspector; case DECIMAL: diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java index 05aed0a..70633f3 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java +++ 
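
The hashCode and compare arms added above delegate to the interval writables, which supply a natural ordering consistent with equals. A small sketch of that contract (outside the patch; the literal format follows the unit tests at the end of this diff):

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;

public class IntervalOrderingSketch {
  public static void main(String[] args) {
    HiveIntervalYearMonthWritable a =
        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-6"));
    HiveIntervalYearMonthWritable b =
        new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-0"));
    System.out.println(a.compareTo(b) < 0); // true: 18 months < 24 months
    System.out.println(a.equals(b));        // false, consistent with compareTo
  }
}
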
b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/PrimitiveObjectInspector.java @@ -31,7 +31,8 @@ */ public static enum PrimitiveCategory { VOID, BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, STRING, - DATE, TIMESTAMP, BINARY, DECIMAL, VARCHAR, CHAR, UNKNOWN + DATE, TIMESTAMP, BINARY, DECIMAL, VARCHAR, CHAR, INTERVAL_YEAR_MONTH, INTERVAL_DAY_TIME, + UNKNOWN }; public PrimitiveTypeInfo getTypeInfo(); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalDayTimeObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalDayTimeObjectInspector.java new file mode 100644 index 0000000..70e9f55 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalDayTimeObjectInspector.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +/** + * A HiveIntervalDayTimeObjectInspector inspects an Object representing a day-time Interval. + */ +public interface HiveIntervalDayTimeObjectInspector extends PrimitiveObjectInspector { + + HiveIntervalDayTimeWritable getPrimitiveWritableObject(Object o); + + HiveIntervalDayTime getPrimitiveJavaObject(Object o); +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalYearMonthObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalYearMonthObjectInspector.java new file mode 100644 index 0000000..b7b59a8 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/HiveIntervalYearMonthObjectInspector.java @@ -0,0 +1,32 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +/** + * A HiveIntervalYearMonthObjectInspector inspects an Object representing a year-month Interval. + */ +public interface HiveIntervalYearMonthObjectInspector extends PrimitiveObjectInspector { + + HiveIntervalYearMonthWritable getPrimitiveWritableObject(Object o); + + HiveIntervalYearMonth getPrimitiveJavaObject(Object o); +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalDayTimeObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalDayTimeObjectInspector.java new file mode 100644 index 0000000..649bcc5 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalDayTimeObjectInspector.java @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; + +public class JavaHiveIntervalDayTimeObjectInspector + extends AbstractPrimitiveJavaObjectInspector + implements SettableHiveIntervalDayTimeObjectInspector { + + public JavaHiveIntervalDayTimeObjectInspector() { + super(TypeInfoFactory.intervalDayTimeTypeInfo); + } + + @Override + public HiveIntervalDayTime getPrimitiveJavaObject(Object o) { + return o == null ? null : (HiveIntervalDayTime) o; + } + + @Override + public HiveIntervalDayTimeWritable getPrimitiveWritableObject(Object o) { + return o == null ? null : new HiveIntervalDayTimeWritable((HiveIntervalDayTime) o); + } + + @Override + public Object set(Object o, HiveIntervalDayTime i) { + return i == null ? null : new HiveIntervalDayTime(i); + } + + @Override + public Object set(Object o, HiveIntervalDayTimeWritable i) { + return i == null ? null : i.getHiveIntervalDayTime(); + } + + @Override + public Object create(HiveIntervalDayTime i) { + return i == null ?
null : new HiveIntervalDayTime(i); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalYearMonthObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalYearMonthObjectInspector.java new file mode 100644 index 0000000..68d2e22 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveIntervalYearMonthObjectInspector.java @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; + +public class JavaHiveIntervalYearMonthObjectInspector + extends AbstractPrimitiveJavaObjectInspector + implements SettableHiveIntervalYearMonthObjectInspector{ + + public JavaHiveIntervalYearMonthObjectInspector() { + super(TypeInfoFactory.intervalYearMonthTypeInfo); + } + + @Override + public HiveIntervalYearMonth getPrimitiveJavaObject(Object o) { + return o == null ? null : (HiveIntervalYearMonth) o; + } + + @Override + public HiveIntervalYearMonthWritable getPrimitiveWritableObject(Object o) { + return o == null ? null : new HiveIntervalYearMonthWritable((HiveIntervalYearMonth) o); + } + + @Override + public Object set(Object o, HiveIntervalYearMonth i) { + return i == null ? null : new HiveIntervalYearMonth(i); + } + + @Override + public Object set(Object o, HiveIntervalYearMonthWritable i) { + return i == null ? null : i.getHiveIntervalYearMonth(); + } + + @Override + public Object create(HiveIntervalYearMonth i) { + return i == null ? 
null : new HiveIntervalYearMonth(i); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java index cb996a8..43792e4 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorConverter.java @@ -23,6 +23,8 @@ import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; @@ -285,6 +287,46 @@ public Object convert(Object input) { } } + public static class HiveIntervalYearMonthConverter implements Converter { + PrimitiveObjectInspector inputOI; + SettableHiveIntervalYearMonthObjectInspector outputOI; + Object r; + + public HiveIntervalYearMonthConverter(PrimitiveObjectInspector inputOI, + SettableHiveIntervalYearMonthObjectInspector outputOI) { + this.inputOI = inputOI; + this.outputOI = outputOI; + r = outputOI.create(new HiveIntervalYearMonth()); + } + + public Object convert(Object input) { + if (input == null) { + return null; + } + return outputOI.set(r, PrimitiveObjectInspectorUtils.getHiveIntervalYearMonth(input, inputOI)); + } + } + + public static class HiveIntervalDayTimeConverter implements Converter { + PrimitiveObjectInspector inputOI; + SettableHiveIntervalDayTimeObjectInspector outputOI; + Object r; + + public HiveIntervalDayTimeConverter(PrimitiveObjectInspector inputOI, + SettableHiveIntervalDayTimeObjectInspector outputOI) { + this.inputOI = inputOI; + this.outputOI = outputOI; + r = outputOI.create(new HiveIntervalDayTime()); + } + + public Object convert(Object input) { + if (input == null) { + return null; + } + return outputOI.set(r, PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(input, inputOI)); + } + } + public static class HiveDecimalConverter implements Converter { PrimitiveObjectInspector inputOI; @@ -419,6 +461,14 @@ public Text convert(Object input) { t.set(((TimestampObjectInspector) inputOI) .getPrimitiveWritableObject(input).toString()); return t; + case INTERVAL_YEAR_MONTH: + t.set(((HiveIntervalYearMonthObjectInspector) inputOI) + .getPrimitiveWritableObject(input).toString()); + return t; + case INTERVAL_DAY_TIME: + t.set(((HiveIntervalDayTimeObjectInspector) inputOI) + .getPrimitiveWritableObject(input).toString()); + return t; case BINARY: BinaryObjectInspector binaryOI = (BinaryObjectInspector) inputOI; if (binaryOI.preferWritable()) { diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java index f466297..a217270 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorFactory.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import 
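
Together with the factory registrations that follow, the converters above let a plain string be coerced into an interval writable. A hypothetical usage sketch (the inspector fields are the ones this patch adds; the wiring is the getConverter dispatch shown earlier):

import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class IntervalConverterSketch {
  public static void main(String[] args) {
    Converter c = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector);
    // Internally this parses via getHiveIntervalYearMonth -> valueOf("1-2").
    HiveIntervalYearMonthWritable w =
        (HiveIntervalYearMonthWritable) c.convert("1-2");
    System.out.println(w); // prints the interval, e.g. "1-2"
  }
}
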
org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -85,6 +87,10 @@ new WritableDateObjectInspector(); public static final WritableTimestampObjectInspector writableTimestampObjectInspector = new WritableTimestampObjectInspector(); + public static final WritableHiveIntervalYearMonthObjectInspector writableHiveIntervalYearMonthObjectInspector = + new WritableHiveIntervalYearMonthObjectInspector(); + public static final WritableHiveIntervalDayTimeObjectInspector writableHiveIntervalDayTimeObjectInspector = + new WritableHiveIntervalDayTimeObjectInspector(); public static final WritableBinaryObjectInspector writableBinaryObjectInspector = new WritableBinaryObjectInspector(); public static final WritableHiveDecimalObjectInspector writableHiveDecimalObjectInspector = @@ -118,6 +124,10 @@ writableDateObjectInspector); cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME), writableTimestampObjectInspector); + cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME), + writableHiveIntervalYearMonthObjectInspector); + cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME), + writableHiveIntervalDayTimeObjectInspector); cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME), writableBinaryObjectInspector); cachedPrimitiveWritableInspectorCache.put(TypeInfoFactory.decimalTypeInfo, writableHiveDecimalObjectInspector); @@ -139,6 +149,8 @@ primitiveCategoryToWritableOI.put(PrimitiveCategory.VOID, writableVoidObjectInspector); primitiveCategoryToWritableOI.put(PrimitiveCategory.DATE, writableDateObjectInspector); primitiveCategoryToWritableOI.put(PrimitiveCategory.TIMESTAMP, writableTimestampObjectInspector); + primitiveCategoryToWritableOI.put(PrimitiveCategory.INTERVAL_YEAR_MONTH, writableHiveIntervalYearMonthObjectInspector); + primitiveCategoryToWritableOI.put(PrimitiveCategory.INTERVAL_DAY_TIME, writableHiveIntervalDayTimeObjectInspector); primitiveCategoryToWritableOI.put(PrimitiveCategory.BINARY, writableBinaryObjectInspector); primitiveCategoryToWritableOI.put(PrimitiveCategory.DECIMAL, writableHiveDecimalObjectInspector); } @@ -169,6 +181,10 @@ new JavaDateObjectInspector(); public static final JavaTimestampObjectInspector javaTimestampObjectInspector = new JavaTimestampObjectInspector(); + public static final JavaHiveIntervalYearMonthObjectInspector javaHiveIntervalYearMonthObjectInspector = + new JavaHiveIntervalYearMonthObjectInspector(); + public static final JavaHiveIntervalDayTimeObjectInspector javaHiveIntervalDayTimeObjectInspector = + new JavaHiveIntervalDayTimeObjectInspector(); public static final JavaBinaryObjectInspector javaByteArrayObjectInspector = new JavaBinaryObjectInspector(); public static final JavaHiveDecimalObjectInspector javaHiveDecimalObjectInspector = @@ -202,6 +218,10 @@ javaDateObjectInspector); cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME), javaTimestampObjectInspector); + 
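
Once the category-to-inspector maps above are populated, callers can obtain the interval inspectors generically. A short sketch of the lookup path (not part of the patch):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FactoryLookupSketch {
  public static void main(String[] args) {
    Object oi = PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector(PrimitiveCategory.INTERVAL_DAY_TIME);
    // The lookup should return the cached singleton registered above.
    System.out.println(
        oi == PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector);
  }
}
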
cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME), + javaHiveIntervalYearMonthObjectInspector); + cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME), + javaHiveIntervalDayTimeObjectInspector); cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME), javaByteArrayObjectInspector); cachedPrimitiveJavaInspectorCache.put(TypeInfoFactory.decimalTypeInfo, javaHiveDecimalObjectInspector); @@ -223,6 +243,8 @@ primitiveCategoryToJavaOI.put(PrimitiveCategory.VOID, javaVoidObjectInspector); primitiveCategoryToJavaOI.put(PrimitiveCategory.DATE, javaDateObjectInspector); primitiveCategoryToJavaOI.put(PrimitiveCategory.TIMESTAMP, javaTimestampObjectInspector); + primitiveCategoryToJavaOI.put(PrimitiveCategory.INTERVAL_YEAR_MONTH, javaHiveIntervalYearMonthObjectInspector); + primitiveCategoryToJavaOI.put(PrimitiveCategory.INTERVAL_DAY_TIME, javaHiveIntervalDayTimeObjectInspector); primitiveCategoryToJavaOI.put(PrimitiveCategory.BINARY, javaByteArrayObjectInspector); primitiveCategoryToJavaOI.put(PrimitiveCategory.DECIMAL, javaHiveDecimalObjectInspector); } @@ -310,6 +332,10 @@ public static ConstantObjectInspector getPrimitiveWritableConstantObjectInspecto return new WritableConstantDateObjectInspector((DateWritable)value); case TIMESTAMP: return new WritableConstantTimestampObjectInspector((TimestampWritable)value); + case INTERVAL_YEAR_MONTH: + return new WritableConstantHiveIntervalYearMonthObjectInspector((HiveIntervalYearMonthWritable) value); + case INTERVAL_DAY_TIME: + return new WritableConstantHiveIntervalDayTimeObjectInspector((HiveIntervalDayTimeWritable) value); case DECIMAL: return new WritableConstantHiveDecimalObjectInspector((DecimalTypeInfo)typeInfo, (HiveDecimalWritable)value); case BINARY: diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java index 5ccacf1..78064a6 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/PrimitiveObjectInspectorUtils.java @@ -31,6 +31,8 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.io.ByteWritable; @@ -38,6 +40,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -221,6 +225,12 @@ static void registerType(PrimitiveTypeEntry t) { public static final PrimitiveTypeEntry timestampTypeEntry = new PrimitiveTypeEntry( PrimitiveCategory.TIMESTAMP, 
serdeConstants.TIMESTAMP_TYPE_NAME, null, Timestamp.class, TimestampWritable.class); + public static final PrimitiveTypeEntry intervalYearMonthTypeEntry = new PrimitiveTypeEntry( + PrimitiveCategory.INTERVAL_YEAR_MONTH, serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, null, + HiveIntervalYearMonth.class, HiveIntervalYearMonthWritable.class); + public static final PrimitiveTypeEntry intervalDayTimeTypeEntry = new PrimitiveTypeEntry( + PrimitiveCategory.INTERVAL_DAY_TIME, serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, null, + HiveIntervalDayTime.class, HiveIntervalDayTimeWritable.class); public static final PrimitiveTypeEntry decimalTypeEntry = new PrimitiveTypeEntry( PrimitiveCategory.DECIMAL, serdeConstants.DECIMAL_TYPE_NAME, null, HiveDecimal.class, HiveDecimalWritable.class); @@ -250,6 +260,8 @@ static void registerType(PrimitiveTypeEntry t) { registerType(shortTypeEntry); registerType(dateTypeEntry); registerType(timestampTypeEntry); + registerType(intervalYearMonthTypeEntry); + registerType(intervalDayTimeTypeEntry); registerType(decimalTypeEntry); registerType(unknownTypeEntry); } @@ -426,6 +438,14 @@ public static boolean comparePrimitiveObjects(Object o1, return ((TimestampObjectInspector) oi1).getPrimitiveWritableObject(o1) .equals(((TimestampObjectInspector) oi2).getPrimitiveWritableObject(o2)); } + case INTERVAL_YEAR_MONTH: { + return ((HiveIntervalYearMonthObjectInspector) oi1).getPrimitiveWritableObject(o1) + .equals(((HiveIntervalYearMonthObjectInspector) oi2).getPrimitiveWritableObject(o2)); + } + case INTERVAL_DAY_TIME: { + return ((HiveIntervalDayTimeObjectInspector) oi1).getPrimitiveWritableObject(o1) + .equals(((HiveIntervalDayTimeObjectInspector) oi2).getPrimitiveWritableObject(o2)); + } case BINARY: { return ((BinaryObjectInspector) oi1).getPrimitiveWritableObject(o1). 
equals(((BinaryObjectInspector) oi2).getPrimitiveWritableObject(o2)); @@ -836,6 +856,12 @@ public static String getString(Object o, PrimitiveObjectInspector oi) { case TIMESTAMP: result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).toString(); break; + case INTERVAL_YEAR_MONTH: + result = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o).toString(); + break; + case INTERVAL_DAY_TIME: + result = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o).toString(); + break; case DECIMAL: result = ((HiveDecimalObjectInspector) oi) .getPrimitiveJavaObject(o).toString(); @@ -1103,6 +1129,71 @@ static Timestamp getTimestampFromString(String s) { return result; } + public static HiveIntervalYearMonth getHiveIntervalYearMonth(Object o, PrimitiveObjectInspector oi) { + if (o == null) { + return null; + } + + HiveIntervalYearMonth result = null; + switch (oi.getPrimitiveCategory()) { + case VOID: + result = null; + break; + case STRING: + case CHAR: + case VARCHAR: { + try { + String val = getString(o, oi).trim(); + result = HiveIntervalYearMonth.valueOf(val); + } catch (IllegalArgumentException e) { + result = null; + } + break; + } + case INTERVAL_YEAR_MONTH: + result = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveJavaObject(o); + break; + + default: + throw new RuntimeException("Cannot convert to IntervalYearMonth from: " + oi.getTypeName()); + } + + return result; + } + + + public static HiveIntervalDayTime getHiveIntervalDayTime(Object o, PrimitiveObjectInspector oi) { + if (o == null) { + return null; + } + + HiveIntervalDayTime result = null; + switch (oi.getPrimitiveCategory()) { + case VOID: + result = null; + break; + case STRING: + case CHAR: + case VARCHAR: { + try { + String val = getString(o, oi).trim(); + result = HiveIntervalDayTime.valueOf(val); + } catch (IllegalArgumentException e) { + result = null; + } + break; + } + case INTERVAL_DAY_TIME: + result = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveJavaObject(o); + break; + + default: + throw new RuntimeException("Cannot convert to IntervalDayTime from: " + oi.getTypeName()); + } + + return result; + } + public static Class getJavaPrimitiveClassFromObjectInspector(ObjectInspector oi) { if (oi.getCategory() != Category.PRIMITIVE) { return null; @@ -1146,6 +1237,9 @@ public static PrimitiveGrouping getPrimitiveGrouping(PrimitiveCategory primitive return PrimitiveGrouping.BOOLEAN_GROUP; case TIMESTAMP: case DATE: + // Interval types are grouped with DATE/TIMESTAMP, since they are operated on together in date arithmetic. + case INTERVAL_YEAR_MONTH: + case INTERVAL_DAY_TIME: return PrimitiveGrouping.DATE_GROUP; case BINARY: return PrimitiveGrouping.BINARY_GROUP; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalDayTimeObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalDayTimeObjectInspector.java new file mode 100644 index 0000000..43fef59 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalDayTimeObjectInspector.java @@ -0,0 +1,34 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; + + +/** + * A SettableHiveIntervalDayTimeObjectInspector can set an IntervalDayTime value to an object. + */ +public interface SettableHiveIntervalDayTimeObjectInspector + extends HiveIntervalDayTimeObjectInspector { + Object set(Object o, HiveIntervalDayTime i); + + Object set(Object o, HiveIntervalDayTimeWritable i); + + Object create(HiveIntervalDayTime i); +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalYearMonthObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalYearMonthObjectInspector.java new file mode 100644 index 0000000..07c20e4 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/SettableHiveIntervalYearMonthObjectInspector.java @@ -0,0 +1,34 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; + + +/** + * A SettableHiveIntervalYearMonthObjectInspector can set an IntervalYearMonth value to an object. + */ +public interface SettableHiveIntervalYearMonthObjectInspector + extends HiveIntervalYearMonthObjectInspector { + Object set(Object o, HiveIntervalYearMonth i); + + Object set(Object o, HiveIntervalYearMonthWritable i); + + Object create(HiveIntervalYearMonth i); +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalDayTimeObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalDayTimeObjectInspector.java new file mode 100644 index 0000000..6ca5a60 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalDayTimeObjectInspector.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; + +public class WritableConstantHiveIntervalDayTimeObjectInspector + extends WritableHiveIntervalDayTimeObjectInspector + implements ConstantObjectInspector { + + private HiveIntervalDayTimeWritable value; + + protected WritableConstantHiveIntervalDayTimeObjectInspector() { + super(); + } + + WritableConstantHiveIntervalDayTimeObjectInspector(HiveIntervalDayTimeWritable value) { + super(); + this.value = value; + } + + @Override + public Object getWritableConstantValue() { + return value; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalYearMonthObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalYearMonthObjectInspector.java new file mode 100644 index 0000000..0840fe7 --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableConstantHiveIntervalYearMonthObjectInspector.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; + +public class WritableConstantHiveIntervalYearMonthObjectInspector + extends WritableHiveIntervalYearMonthObjectInspector + implements ConstantObjectInspector { + + private HiveIntervalYearMonthWritable value; + + protected WritableConstantHiveIntervalYearMonthObjectInspector() { + super(); + } + + WritableConstantHiveIntervalYearMonthObjectInspector(HiveIntervalYearMonthWritable value) { + super(); + this.value = value; + } + + @Override + public Object getWritableConstantValue() { + return value; + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalDayTimeObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalDayTimeObjectInspector.java new file mode 100644 index 0000000..d9c9aab --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalDayTimeObjectInspector.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; + +public class WritableHiveIntervalDayTimeObjectInspector + extends AbstractPrimitiveWritableObjectInspector + implements SettableHiveIntervalDayTimeObjectInspector{ + + public WritableHiveIntervalDayTimeObjectInspector() { + super(TypeInfoFactory.intervalDayTimeTypeInfo); + } + + @Override + public HiveIntervalDayTime getPrimitiveJavaObject(Object o) { + return o == null ? null : ((HiveIntervalDayTimeWritable) o).getHiveIntervalDayTime(); + } + + @Override + public HiveIntervalDayTimeWritable getPrimitiveWritableObject(Object o) { + return o == null ? null : (HiveIntervalDayTimeWritable) o; + } + + @Override + public Object copyObject(Object o) { + return o == null ? null : new HiveIntervalDayTimeWritable((HiveIntervalDayTimeWritable) o); + } + + @Override + public Object set(Object o, HiveIntervalDayTime i) { + if (i == null) { + return null; + } + ((HiveIntervalDayTimeWritable) o).set(i); + return o; + } + + @Override + public Object set(Object o, HiveIntervalDayTimeWritable i) { + if (i == null) { + return null; + } + ((HiveIntervalDayTimeWritable) o).set(i); + return o; + } + + @Override + public Object create(HiveIntervalDayTime i) { + return i == null ? 
null : new HiveIntervalDayTimeWritable(i); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalYearMonthObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalYearMonthObjectInspector.java new file mode 100644 index 0000000..a9391af --- /dev/null +++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveIntervalYearMonthObjectInspector.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.serde2.objectinspector.primitive; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; + +public class WritableHiveIntervalYearMonthObjectInspector + extends AbstractPrimitiveWritableObjectInspector + implements SettableHiveIntervalYearMonthObjectInspector{ + + public WritableHiveIntervalYearMonthObjectInspector() { + super(TypeInfoFactory.intervalYearMonthTypeInfo); + } + + @Override + public HiveIntervalYearMonth getPrimitiveJavaObject(Object o) { + return o == null ? null : ((HiveIntervalYearMonthWritable) o).getHiveIntervalYearMonth(); + } + + @Override + public HiveIntervalYearMonthWritable getPrimitiveWritableObject(Object o) { + return o == null ? null : (HiveIntervalYearMonthWritable) o; + } + + @Override + public Object copyObject(Object o) { + return o == null ? null : new HiveIntervalYearMonthWritable((HiveIntervalYearMonthWritable) o); + } + + @Override + public Object set(Object o, HiveIntervalYearMonth i) { + if (i == null) { + return null; + } + ((HiveIntervalYearMonthWritable) o).set(i); + return o; + } + + @Override + public Object set(Object o, HiveIntervalYearMonthWritable i) { + if (i == null) { + return null; + } + ((HiveIntervalYearMonthWritable) o).set(i); + return o; + } + + @Override + public Object create(HiveIntervalYearMonth i) { + return i == null ? 
null : new HiveIntervalYearMonthWritable(i); + } +} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java index e5c9f18..c7bc936 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfoFactory.java @@ -59,6 +59,8 @@ private TypeInfoFactory() { public static final PrimitiveTypeInfo shortTypeInfo = new PrimitiveTypeInfo(serdeConstants.SMALLINT_TYPE_NAME); public static final PrimitiveTypeInfo dateTypeInfo = new PrimitiveTypeInfo(serdeConstants.DATE_TYPE_NAME); public static final PrimitiveTypeInfo timestampTypeInfo = new PrimitiveTypeInfo(serdeConstants.TIMESTAMP_TYPE_NAME); + public static final PrimitiveTypeInfo intervalYearMonthTypeInfo = new PrimitiveTypeInfo(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME); + public static final PrimitiveTypeInfo intervalDayTimeTypeInfo = new PrimitiveTypeInfo(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME); public static final PrimitiveTypeInfo binaryTypeInfo = new PrimitiveTypeInfo(serdeConstants.BINARY_TYPE_NAME); /** @@ -87,6 +89,8 @@ private TypeInfoFactory() { cachedPrimitiveTypeInfo.put(serdeConstants.SMALLINT_TYPE_NAME, shortTypeInfo); cachedPrimitiveTypeInfo.put(serdeConstants.DATE_TYPE_NAME, dateTypeInfo); cachedPrimitiveTypeInfo.put(serdeConstants.TIMESTAMP_TYPE_NAME, timestampTypeInfo); + cachedPrimitiveTypeInfo.put(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME, intervalYearMonthTypeInfo); + cachedPrimitiveTypeInfo.put(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME, intervalDayTimeTypeInfo); cachedPrimitiveTypeInfo.put(serdeConstants.BINARY_TYPE_NAME, binaryTypeInfo); cachedPrimitiveTypeInfo.put(decimalTypeInfo.getQualifiedName(), decimalTypeInfo); cachedPrimitiveTypeInfo.put("unknown", unknownTypeInfo); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalDayTimeWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalDayTimeWritable.java new file mode 100644 index 0000000..8d7e079 --- /dev/null +++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalDayTimeWritable.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
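
Stepping back to the TypeInfoFactory hunk above: once registered, the interval type infos resolve by name like any other primitive, and repeated lookups hit the cache. A sketch (the serdeConstants names are the ones this patch series introduces):

import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class IntervalTypeInfoSketch {
  public static void main(String[] args) {
    PrimitiveTypeInfo ym = TypeInfoFactory.getPrimitiveTypeInfo(
        serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME);
    // Cache hit: the same singleton that was registered above.
    System.out.println(ym == TypeInfoFactory.intervalYearMonthTypeInfo);
  }
}
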
+ */ + +package org.apache.hadoop.hive.serde2.io; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; + +import com.google.code.tempusfugit.concurrency.annotations.*; +import com.google.code.tempusfugit.concurrency.*; + +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.junit.*; + +import static org.junit.Assert.*; + +public class TestHiveIntervalDayTimeWritable { + @Rule public ConcurrentRule concurrentRule = new ConcurrentRule(); + @Rule public RepeatingRule repeatingRule = new RepeatingRule(); + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testConstructor() throws Exception { + HiveIntervalDayTime hi1 = HiveIntervalDayTime.valueOf("3 4:5:6.12345"); + HiveIntervalDayTimeWritable hiw1 = new HiveIntervalDayTimeWritable(hi1); + HiveIntervalDayTimeWritable hiw2 = new HiveIntervalDayTimeWritable(hiw1); + assertEquals(hiw1, hiw2); + } + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testComparison() throws Exception { + HiveIntervalDayTimeWritable hiw0 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:2:2.22222")); + HiveIntervalDayTimeWritable hiw1 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:2:2.22222")); + HiveIntervalDayTimeWritable hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("3 2:2:2.22222")); + + assertTrue(hiw1 + " equals " + hiw1, hiw1.equals(hiw1)); + assertTrue(hiw1 + " equals " + hiw0, hiw1.equals(hiw0)); + assertFalse(hiw1 + " equals " + hiw2, hiw1.equals(hiw2)); + + assertTrue(hiw1 + " compare " + hiw1, 0 == hiw1.compareTo(hiw1)); + assertTrue(hiw1 + " compare " + hiw0, 0 == hiw1.compareTo(hiw0)); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("3 2:2:2.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("1 2:2:2.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 3:2:2.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 1:2:2.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:3:2.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:1:2.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:2:3.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:2:1.22222")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:2:2.33333")); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalDayTimeWritable( + HiveIntervalDayTime.valueOf("2 2:2:2.11111")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + // Also check hashCode() + assertEquals(hiw0.hashCode(), hiw1.hashCode()); + } + + 
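
The expected orderings in testComparison reduce to a single (totalSeconds, nanos) pair per value. A sketch of that arithmetic (not part of the test class):

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;

public class IntervalFieldMathSketch {
  public static void main(String[] args) {
    HiveIntervalDayTime d = HiveIntervalDayTime.valueOf("2 2:2:2.22222");
    // 2*86400 + 2*3600 + 2*60 + 2 = 180122 seconds carried internally.
    System.out.println(d.getTotalSeconds()); // 180122
    System.out.println(d.getNanos());        // 222220000
  }
}
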
@Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testGettersSetters() throws Exception { + HiveIntervalDayTimeWritable hiw1 = new HiveIntervalDayTimeWritable(); + + hiw1.set(3, 4, 5, 6, 7); + + HiveIntervalDayTime hi1 = hiw1.getHiveIntervalDayTime(); + assertEquals(3, hi1.getDays()); + assertEquals(4, hi1.getHours()); + assertEquals(5, hi1.getMinutes()); + assertEquals(6, hi1.getSeconds()); + assertEquals(7, hi1.getNanos()); + } + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testWritableMethods() throws Exception { + HiveIntervalDayTimeWritable hiw1 = new HiveIntervalDayTimeWritable(); + HiveIntervalDayTimeWritable hiw2 = new HiveIntervalDayTimeWritable(); + + hiw1.set(3, 4, 5, 6, 7); + hiw2.set(5, 4, 3, 2, 1); + assertFalse(hiw1.equals(hiw2)); + + ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); + DataOutput out = new DataOutputStream(byteStream); + + hiw1.write(out); + hiw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()))); + assertEquals(hiw1, hiw2); + } +} diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalYearMonthWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalYearMonthWritable.java new file mode 100644 index 0000000..345724c --- /dev/null +++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestHiveIntervalYearMonthWritable.java @@ -0,0 +1,122 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.serde2.io; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutput; +import java.io.DataOutputStream; + +import com.google.code.tempusfugit.concurrency.annotations.*; +import com.google.code.tempusfugit.concurrency.*; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.junit.*; + +import static org.junit.Assert.*; + +public class TestHiveIntervalYearMonthWritable { + @Rule public ConcurrentRule concurrentRule = new ConcurrentRule(); + @Rule public RepeatingRule repeatingRule = new RepeatingRule(); + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testConstructor() throws Exception { + HiveIntervalYearMonth hi1 = HiveIntervalYearMonth.valueOf("1-2"); + HiveIntervalYearMonthWritable hiw1 = new HiveIntervalYearMonthWritable(hi1); + HiveIntervalYearMonthWritable hiw2 = new HiveIntervalYearMonthWritable(hiw1); + assertEquals(hiw1, hiw2); + } + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testComparison() throws Exception { + HiveIntervalYearMonthWritable hiw0 = new HiveIntervalYearMonthWritable( + HiveIntervalYearMonth.valueOf("2-2")); + HiveIntervalYearMonthWritable hiw1 = new HiveIntervalYearMonthWritable( + HiveIntervalYearMonth.valueOf("2-2")); + HiveIntervalYearMonthWritable hiw2 = new HiveIntervalYearMonthWritable( + HiveIntervalYearMonth.valueOf("3-2")); + + assertTrue(hiw1 + " equals " + hiw1, hiw1.equals(hiw1)); + assertTrue(hiw1 + " equals " + hiw0, hiw1.equals(hiw0)); + assertFalse(hiw1 + " equals " + hiw2, hiw1.equals(hiw2)); + + assertTrue(hiw1 + " compare " + hiw1, 0 == hiw1.compareTo(hiw1)); + assertTrue(hiw1 + " compare " + hiw0, 0 == hiw1.compareTo(hiw0)); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalYearMonthWritable( + HiveIntervalYearMonth.valueOf("1-2")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + hiw2 = new HiveIntervalYearMonthWritable( + HiveIntervalYearMonth.valueOf("2-3")); + assertTrue(hiw1 + " compare " + hiw2, 0 > hiw1.compareTo(hiw2)); + hiw2 = new HiveIntervalYearMonthWritable( + HiveIntervalYearMonth.valueOf("2-1")); + assertTrue(hiw1 + " compare " + hiw2, 0 < hiw1.compareTo(hiw2)); + + // Also check hashCode() + assertEquals(hiw0.hashCode(), hiw1.hashCode()); + } + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testGettersSetters() throws Exception { + HiveIntervalYearMonthWritable hiw1 = new HiveIntervalYearMonthWritable(); + + hiw1.set(1, 2); + HiveIntervalYearMonth hi1 = hiw1.getHiveIntervalYearMonth(); + assertEquals(1, hi1.getYears()); + assertEquals(2, hi1.getMonths()); + + hiw1.set(new HiveIntervalYearMonth(3,4)); + hi1 = hiw1.getHiveIntervalYearMonth(); + assertEquals(3, hi1.getYears()); + assertEquals(4, hi1.getMonths()); + + hiw1.set(new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(5,6))); + hi1 = hiw1.getHiveIntervalYearMonth(); + assertEquals(5, hi1.getYears()); + assertEquals(6, hi1.getMonths()); + } + + @Test + @Concurrent(count=4) + @Repeating(repetition=100) + public void testWritableMethods() throws Exception { + HiveIntervalYearMonthWritable hiw1 = new HiveIntervalYearMonthWritable(); + HiveIntervalYearMonthWritable hiw2 = new HiveIntervalYearMonthWritable(); + + hiw1.set(1, 2); + hiw2.set(7, 6); + assertFalse(hiw1.equals(hiw2)); + + ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); + DataOutput out = new 
DataOutputStream(byteStream); + + hiw1.write(out); + hiw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()))); + assertEquals(hiw1, hiw2); + } +}
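
One behavior worth calling out from the PrimitiveObjectInspectorUtils changes earlier in the patch: string-to-interval coercion swallows parse failures and yields null instead of throwing. A closing sketch of that path (class name invented for illustration):

import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;

public class IntervalCoercionSketch {
  public static void main(String[] args) {
    HiveIntervalDayTime ok = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
        "3 4:5:6.12345", PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    HiveIntervalDayTime bad = PrimitiveObjectInspectorUtils.getHiveIntervalDayTime(
        "not an interval", PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    System.out.println(ok);  // the parsed day-time interval
    System.out.println(bad); // null: IllegalArgumentException is mapped to null
  }
}
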