diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java index 076ca51901..e040bbbe93 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java @@ -71,7 +71,8 @@ TIMESTAMP_MINUS_INTERVALYM, INTERVALDT_MINUS_INTERVALDT, TIMESTAMP_MINUS_INTERVALDT, - TIMESTAMP_MINUS_TIMESTAMP + TIMESTAMP_MINUS_TIMESTAMP, + DATE_MINUS_INT }; public GenericUDFOPDTIMinus() { @@ -117,6 +118,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments) // Timestamp - Timestamp = IntervalDayTime // Date - Date = IntervalDayTime // Timestamp - Date = IntervalDayTime (operands reversible) + // Date - Int = Date if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { minusOpType = OperationType.INTERVALYM_MINUS_INTERVALYM; intervalArg1Idx = 0; @@ -161,6 +163,13 @@ public ObjectInspector initialize(ObjectInspector[] arguments) TypeInfoFactory.intervalDayTimeTypeInfo); dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI); dt2Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INT)) { + minusOpType = OperationType.DATE_MINUS_INT; + intervalArg1Idx = 1; + dtArg1Idx = 0; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.dateTypeInfo); + dt1Converter = ObjectInspectorConverters.getConverter(leftOI, resultOI); } else { // Unsupported types - error List argTypeInfos = new ArrayList(2); @@ -217,8 +226,15 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { arguments[dtArg2Idx].get(), inputOIs[dtArg2Idx]); return handleIntervalDayTimeResult(dtm.subtract(ts1, ts2)); } + case DATE_MINUS_INT: { + int intVal = 
PrimitiveObjectInspectorUtils.getInt(arguments[intervalArg1Idx].get(), + inputOIs[intervalArg1Idx]); + Date dt1 = PrimitiveObjectInspectorUtils.getDate( + arguments[dtArg1Idx].get(), inputOIs[dtArg1Idx]); + return handleDateResult(dtm.subtract(dt1, intVal)); + } default: - throw new HiveException("Unknown PlusOpType " + minusOpType); + throw new HiveException("Unknown MinusOpType " + minusOpType); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java index 9295c8fdfb..9c9674e8dc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java @@ -69,6 +69,7 @@ INTERVALYM_PLUS_TIMESTAMP, INTERVALDT_PLUS_INTERVALDT, INTERVALDT_PLUS_TIMESTAMP, + DATE_PLUS_INT, }; public GenericUDFOPDTIPlus() { @@ -111,6 +112,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments) // IntervalDayTime + IntervalDayTime = IntervalDayTime // IntervalDayTime + Date = Timestamp (operands reversible) // IntervalDayTime + Timestamp = Timestamp (operands reversible) + // Date + Int = Date if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) { plusOpType = OperationType.INTERVALYM_PLUS_INTERVALYM; intervalArg1Idx = 0; @@ -163,6 +165,13 @@ public ObjectInspector initialize(ObjectInspector[] arguments) resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( TypeInfoFactory.timestampTypeInfo); dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI); + } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INT)) { + plusOpType = OperationType.DATE_PLUS_INT; + intervalArg1Idx = 1; + dtArgIdx = 0; + resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector( + TypeInfoFactory.dateTypeInfo); + dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI); } else { // 
Unsupported types - error List argTypeInfos = new ArrayList(2); @@ -212,6 +221,13 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { arguments[dtArgIdx].get(), inputOIs[dtArgIdx]); return handleTimestampResult(dtm.add(ts1, idt1)); } + case DATE_PLUS_INT: { + int intVal = PrimitiveObjectInspectorUtils.getInt(arguments[intervalArg1Idx].get(), + inputOIs[intervalArg1Idx]); + Date dt1 = PrimitiveObjectInspectorUtils.getDate( + arguments[dtArgIdx].get(), inputOIs[dtArgIdx]); + return handleDateResult(dtm.add(dt1, intVal)); + } default: throw new HiveException("Unknown PlusOpType " + plusOpType); } diff --git ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java index 16babbf7a7..3195f1a6bf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java +++ ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java @@ -207,7 +207,22 @@ public boolean add(Date dt, HiveIntervalYearMonth interval, Date result) { result.setTimeInMillis(resultMillis); return true; } - + + /** + * Perform date + int operation. + * @param dt the date + * @param interval the int (days) + * @param result the resulting date + * @return was it successful + */ + public boolean add(Date dt, int interval, Date result) { + if (dt == null) { + return false; + } + result.setTimeInDays(dt.toEpochDay() + interval); + return true; + } + @Deprecated public boolean add(java.sql.Date dt, HiveIntervalYearMonth interval, java.sql.Date result) { if (dt == null || interval == null) { @@ -229,6 +244,23 @@ public Date add(HiveIntervalYearMonth interval, Date dt) { return dtResult; } + + /** + * Perform date + int operation. 
+ * @param dt the date + * @param interval the int (days) + * @return the resulting date + */ + public Date add(Date dt, int interval) { + if (dt == null) { + return null; + } + + Date dtResult = new Date(); + add(dt, interval, dtResult); + + return dtResult; + } @Deprecated public java.sql.Date add(HiveIntervalYearMonth interval, java.sql.Date dt) { @@ -321,6 +353,38 @@ public Date subtract(Date left, HiveIntervalYearMonth right) { return dtResult; } + + /** + * Perform date - int operation. + * @param left the date + * @param right the int (days) + * @return the resulting date + */ + public Date subtract(Date left, int right) { + if (left == null) { + return null; + } + + Date dtResult = new Date(); + subtract(left, right, dtResult); + + return dtResult; + } + + /** + * Perform date - int operation. + * @param dt the date + * @param interval the int (days) + * @param result the resulting date + * @return was it successful + */ + public boolean subtract(Date dt, int interval, Date result) { + if (dt == null) { + return false; + } + result.setTimeInDays(dt.toEpochDay() - interval); + return true; + } @Deprecated public java.sql.Date subtract(java.sql.Date left, HiveIntervalYearMonth right) { diff --git ql/src/test/queries/clientpositive/date_int_operation_test.q ql/src/test/queries/clientpositive/date_int_operation_test.q new file mode 100644 index 0000000000..05d97d75d7 --- /dev/null +++ ql/src/test/queries/clientpositive/date_int_operation_test.q @@ -0,0 +1,5 @@ +--! qt:dataset:srcpart +--! qt:dataset:src +--! 
qt:dataset:alltypesorc +select date('2001-01-28') + 3; +select date('2001-01-28') - 3; diff --git ql/src/test/results/clientpositive/date_int_operation_test.q.out ql/src/test/results/clientpositive/date_int_operation_test.q.out new file mode 100644 index 0000000000..c6e40b9ad3 --- /dev/null +++ ql/src/test/results/clientpositive/date_int_operation_test.q.out @@ -0,0 +1,18 @@ +PREHOOK: query: select date('2001-01-28') + 3 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select date('2001-01-28') + 3 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +2001-01-31 +PREHOOK: query: select date('2001-01-28') - 3 +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +POSTHOOK: query: select date('2001-01-28') - 3 +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +#### A masked pattern was here #### +2001-01-25