diff --git a/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java b/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
index 375c173..f7575a3 100644
--- a/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
+++ b/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
@@ -35,6 +35,89 @@ private static String [][] templateExpansions = {
+    // The following datetime/interval arithmetic operations can be done using the vectorized values
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_year_month", "interval_year_month", "+"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "interval_year_month", "interval_year_month", "-"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "interval_day_time", "+"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "interval_day_time", "interval_day_time", "-"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "interval_day_time", "timestamp", "+"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "timestamp", "+"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "interval_day_time", "timestamp", "+"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Add", "timestamp", "interval_day_time", "+"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Add", "timestamp", "interval_day_time", "+"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Add", "timestamp", "interval_day_time", "+"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "interval_day_time", "-"},
+
+    {"DTIColumnArithmeticDTIScalarNoConvert", "Subtract", "timestamp", "timestamp", "-"},
+    {"DTIScalarArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "timestamp", "-"},
+    {"DTIColumnArithmeticDTIColumnNoConvert", "Subtract", "timestamp", "timestamp", "-"},
+
+    // The following datetime/interval arithmetic functions require type conversion for one or both operands
+    {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
+    {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
+    {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "date", "-", "TimestampUtils.daysToNanoseconds", "TimestampUtils.daysToNanoseconds"},
+
+    {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
+    {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
+    {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "timestamp", "-", "TimestampUtils.daysToNanoseconds", ""},
+
+    {"ColumnArithmeticColumnWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
+    {"ScalarArithmeticColumnWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
+    {"ColumnArithmeticScalarWithConvert", "Subtract", "timestamp", "date", "-", "", "TimestampUtils.daysToNanoseconds"},
+
+    {"ColumnArithmeticColumnWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
+    {"ScalarArithmeticColumnWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
+    {"ColumnArithmeticScalarWithConvert", "Add", "date", "interval_day_time", "+", "TimestampUtils.daysToNanoseconds", ""},
+
+    {"ColumnArithmeticColumnWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
+    {"ScalarArithmeticColumnWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
+    {"ColumnArithmeticScalarWithConvert", "Subtract", "date", "interval_day_time", "-", "TimestampUtils.daysToNanoseconds", ""},
+
+    {"ColumnArithmeticColumnWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
+    {"ScalarArithmeticColumnWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
+    {"ColumnArithmeticScalarWithConvert", "Add", "interval_day_time", "date", "+", "", "TimestampUtils.daysToNanoseconds"},
+
+    // Most year-month interval arithmetic needs its own generation
+    {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
+    {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
+    {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Add", "date", "interval_year_month", "+", "", "dtm.addMonthsToDays"},
+
+    {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
+    {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
+    {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Subtract", "date", "interval_year_month", "-", "", "dtm.addMonthsToDays"},
+
+    {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
+    {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
+    {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Add", "timestamp", "interval_year_month", "+", "", "dtm.addMonthsToNanosUtc"},
+
+    {"DateTimeColumnArithmeticIntervalColumnWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
+    {"DateTimeScalarArithmeticIntervalColumnWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
+    {"DateTimeColumnArithmeticIntervalScalarWithConvert", "Subtract", "timestamp", "interval_year_month", "-", "", "dtm.addMonthsToNanosUtc"},
+
+    {"IntervalColumnArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
+    {"IntervalScalarArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
+    {"IntervalColumnArithmeticDateTimeScalarWithConvert", "Add", "interval_year_month", "date", "+", "", "dtm.addMonthsToDays"},
+
+    {"IntervalColumnArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
+    {"IntervalScalarArithmeticDateTimeColumnWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
+    {"IntervalColumnArithmeticDateTimeScalarWithConvert", "Add", "interval_year_month", "timestamp", "+", "", "dtm.addMonthsToNanosUtc"},
+
     {"ColumnArithmeticScalar", "Add", "long", "long", "+"},
     {"ColumnArithmeticScalar", "Subtract", "long", "long", "-"},
     {"ColumnArithmeticScalar", "Multiply", "long", "long", "*"},
+ {"FilterDTIColumnCompareScalar", "Less", "interval_year_month"}, + {"FilterDTIColumnCompareScalar", "Less", "interval_day_time"}, + {"FilterDTIColumnCompareScalar", "LessEqual", "interval_year_month"}, + {"FilterDTIColumnCompareScalar", "LessEqual", "interval_day_time"}, + {"FilterDTIColumnCompareScalar", "Greater", "interval_year_month"}, + {"FilterDTIColumnCompareScalar", "Greater", "interval_day_time"}, + {"FilterDTIColumnCompareScalar", "GreaterEqual", "interval_year_month"}, + {"FilterDTIColumnCompareScalar", "GreaterEqual", "interval_day_time"}, + + // Date comparisons + {"DTIScalarCompareColumn", "Equal", "date"}, + {"DTIScalarCompareColumn", "NotEqual", "date"}, + {"DTIScalarCompareColumn", "Less", "date"}, + {"DTIScalarCompareColumn", "LessEqual", "date"}, + {"DTIScalarCompareColumn", "Greater", "date"}, + {"DTIScalarCompareColumn", "GreaterEqual", "date"}, + + {"DTIColumnCompareScalar", "Equal", "date"}, + {"DTIColumnCompareScalar", "NotEqual", "date"}, + {"DTIColumnCompareScalar", "Less", "date"}, + {"DTIColumnCompareScalar", "LessEqual", "date"}, + {"DTIColumnCompareScalar", "Greater", "date"}, + {"DTIColumnCompareScalar", "GreaterEqual", "date"}, + + {"FilterDTIScalarCompareColumn", "Equal", "date"}, + {"FilterDTIScalarCompareColumn", "NotEqual", "date"}, + {"FilterDTIScalarCompareColumn", "Less", "date"}, + {"FilterDTIScalarCompareColumn", "LessEqual", "date"}, + {"FilterDTIScalarCompareColumn", "Greater", "date"}, + {"FilterDTIScalarCompareColumn", "GreaterEqual", "date"}, + + {"FilterDTIColumnCompareScalar", "Equal", "date"}, + {"FilterDTIColumnCompareScalar", "NotEqual", "date"}, + {"FilterDTIColumnCompareScalar", "Less", "date"}, + {"FilterDTIColumnCompareScalar", "LessEqual", "date"}, + {"FilterDTIColumnCompareScalar", "Greater", "date"}, + {"FilterDTIColumnCompareScalar", "GreaterEqual", "date"}, + // template, , , , , , // , {"ColumnUnaryFunc", "FuncRound", "double", "double", "MathExpr.round", "", "", "", ""}, @@ -896,6 +1061,38 @@ private void generate() throws Exception { generateFilterDecimalScalarCompareColumn(tdesc); } else if (tdesc[0].equals("FilterDecimalColumnCompareColumn")) { generateFilterDecimalColumnCompareColumn(tdesc); + } else if (tdesc[0].equals("FilterDTIScalarCompareColumn")) { + generateFilterDTIScalarCompareColumn(tdesc); + } else if (tdesc[0].equals("FilterDTIColumnCompareScalar")) { + generateFilterDTIColumnCompareScalar(tdesc); + } else if (tdesc[0].equals("DTIScalarCompareColumn")) { + generateDTIScalarCompareColumn(tdesc); + } else if (tdesc[0].equals("DTIColumnCompareScalar")) { + generateDTIColumnCompareScalar(tdesc); + } else if (tdesc[0].equals("DTIColumnArithmeticDTIScalarNoConvert")) { + generateColumnArithmeticScalar(tdesc); + } else if (tdesc[0].equals("DTIScalarArithmeticDTIColumnNoConvert")) { + generateScalarArithmeticColumn(tdesc); + } else if (tdesc[0].equals("DTIColumnArithmeticDTIColumnNoConvert")) { + generateColumnArithmeticColumn(tdesc); + } else if (tdesc[0].equals("ColumnArithmeticColumnWithConvert")) { + generateColumnArithmeticColumnWithConvert(tdesc); + } else if (tdesc[0].equals("ScalarArithmeticColumnWithConvert")) { + generateScalarArithmeticColumnWithConvert(tdesc); + } else if (tdesc[0].equals("ColumnArithmeticScalarWithConvert")) { + generateColumnArithmeticScalarWithConvert(tdesc); + } else if (tdesc[0].equals("DateTimeColumnArithmeticIntervalColumnWithConvert")) { + generateDateTimeColumnArithmeticIntervalColumnWithConvert(tdesc); + } else if 
(tdesc[0].equals("DateTimeScalarArithmeticIntervalColumnWithConvert")) { + generateDateTimeScalarArithmeticIntervalColumnWithConvert(tdesc); + } else if (tdesc[0].equals("DateTimeColumnArithmeticIntervalScalarWithConvert")) { + generateDateTimeColumnArithmeticIntervalScalarWithConvert(tdesc); + } else if (tdesc[0].equals("IntervalColumnArithmeticDateTimeColumnWithConvert")) { + generateDateTimeColumnArithmeticIntervalColumnWithConvert(tdesc); + } else if (tdesc[0].equals("IntervalScalarArithmeticDateTimeColumnWithConvert")) { + generateDateTimeScalarArithmeticIntervalColumnWithConvert(tdesc); + } else if (tdesc[0].equals("IntervalColumnArithmeticDateTimeScalarWithConvert")) { + generateDateTimeColumnArithmeticIntervalScalarWithConvert(tdesc); } else { continue; } @@ -1324,12 +1521,18 @@ private void generateColumnUnaryMinus(String[] tdesc) throws Exception { String className = getCamelCaseType(operandType) + "ColUnaryMinus"; File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt")); String templateString = readFile(templateFile); + String vectorExprArgType = operandType; + if (operandType.equals("long")) { + // interval types can use long version + vectorExprArgType = "int_interval_family"; + } // Expand, and write result templateString = templateString.replaceAll("", className); templateString = templateString.replaceAll("", inputColumnVectorType); templateString = templateString.replaceAll("", outputColumnVectorType); templateString = templateString.replaceAll("", operandType); templateString = templateString.replaceAll("", returnType); + templateString = templateString.replaceAll("", vectorExprArgType); writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory, className, templateString); } @@ -1353,7 +1556,7 @@ private void generateIfExprColumnColumn(String[] tdesc) throws Exception { // Toss in timestamp and date. if (operandType.equals("long")) { // Let comparisons occur for DATE and TIMESTAMP, too. - vectorExprArgType = "int_datetime_family"; + vectorExprArgType = "int_datetime_interval_family"; } templateString = templateString.replaceAll("", vectorExprArgType); @@ -1385,8 +1588,8 @@ private void generateIfExprColumnScalar(String[] tdesc) throws Exception { // Toss in timestamp and date. if (operandType2.equals("long") && operandType3.equals("long")) { - vectorExprArgType2 = "int_datetime_family"; - vectorExprArgType3 = "int_datetime_family"; + vectorExprArgType2 = "int_datetime_interval_family"; + vectorExprArgType3 = "int_datetime_interval_family"; } templateString = templateString.replaceAll("", vectorExprArgType2); templateString = templateString.replaceAll("", vectorExprArgType3); @@ -1418,8 +1621,8 @@ private void generateIfExprScalarColumn(String[] tdesc) throws Exception { // Toss in timestamp and date. if (operandType2.equals("long") && operandType3.equals("long")) { - vectorExprArgType2 = "int_datetime_family"; - vectorExprArgType3 = "int_datetime_family"; + vectorExprArgType2 = "int_datetime_interval_family"; + vectorExprArgType3 = "int_datetime_interval_family"; } templateString = templateString.replaceAll("", vectorExprArgType2); templateString = templateString.replaceAll("", vectorExprArgType3); @@ -1450,8 +1653,8 @@ private void generateIfExprScalarScalar(String[] tdesc) throws Exception { // Toss in timestamp and date. 
@@ -1586,8 +1789,8 @@ private void generateColumnCompareOperatorColumn(String[] tdesc, boolean filter,
     // But {timestamp|date} and scalar must be handled separately.
     if (operandType1.equals("long") && operandType2.equals("long")) {
       // Let comparisons occur for DATE and TIMESTAMP, too.
-      vectorExprArgType1 = "int_datetime_family";
-      vectorExprArgType2 = "int_datetime_family";
+      vectorExprArgType1 = "int_datetime_interval_family";
+      vectorExprArgType2 = "int_datetime_interval_family";
     }
     templateString = templateString.replaceAll("<VectorExprArgType1>", vectorExprArgType1);
     templateString = templateString.replaceAll("<VectorExprArgType2>", vectorExprArgType2);
@@ -1738,6 +1941,7 @@ private void generateFilterScalarCompareTimestampColumn(String[] tdesc) throws E
 
   private void generateColumnArithmeticOperatorColumn(String[] tdesc, String returnType,
       String className) throws Exception {
+    String operatorName = tdesc[1];
     String operandType1 = tdesc[2];
     String operandType2 = tdesc[3];
     String outputColumnVectorType = this.getColumnVectorType(returnType);
@@ -1752,6 +1956,7 @@ private void generateColumnArithmeticOperatorColumn(String[] tdesc, String retur
     templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
     templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
     templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
     templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
     templateString = templateString.replaceAll("<OperandType1>", operandType1);
     templateString = templateString.replaceAll("<OperandType2>", operandType2);
@@ -1813,6 +2018,7 @@ private void generateColumnCompareOperatorScalar(String[] tdesc, boolean filter,
 
   private void generateColumnArithmeticOperatorScalar(String[] tdesc, String returnType,
       String className) throws Exception {
+    String operatorName = tdesc[1];
     String operandType1 = tdesc[2];
     String operandType2 = tdesc[3];
     String outputColumnVectorType = this.getColumnVectorType(returnType);
@@ -1825,6 +2031,7 @@ private void generateColumnArithmeticOperatorScalar(String[] tdesc, String retur
     templateString = templateString.replaceAll("<ClassName>", className);
     templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
     templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
     templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
     templateString = templateString.replaceAll("<OperandType1>", operandType1);
     templateString = templateString.replaceAll("<OperandType2>", operandType2);
@@ -1832,12 +2039,17 @@ private void generateColumnArithmeticOperatorScalar(String[] tdesc, String retur
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
 
+    String testScalarType = operandType2;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
     testCodeGen.addColumnScalarOperationTestCases(
         true,
         className,
         inputColumnVectorType,
         outputColumnVectorType,
-        operandType2);
+        testScalarType);
   }
 
   private void generateScalarCompareOperatorColumn(String[] tdesc, boolean filter,
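The testScalarType mapping works because every datetime/interval type in this patch is physically
a long inside a LongColumnVector. The encodings assumed by the arithmetic rows appear to be: date
as days since epoch, timestamp as nanoseconds since epoch, interval_year_month as total months,
and interval_day_time as total nanoseconds. That is why TimestampUtils.daysToNanoseconds suffices
to align date with timestamp operands; a sketch of what such a conversion has to do (the real
helper may route through DateWritable.daysToMillis to respect the local-time encoding of dates):

    // Hypothetical day->nanosecond alignment, matching the role of
    // TimestampUtils.daysToNanoseconds in the template rows above.
    public static long daysToNanoseconds(long daysSinceEpoch) {
      return daysSinceEpoch * 86400L * 1000000000L;  // 24*60*60 seconds per day
    }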
@@ -1886,6 +2098,7 @@ private void generateScalarCompareOperatorColumn(String[] tdesc, boolean filter,
 
   private void generateScalarArithmeticOperatorColumn(String[] tdesc, String returnType,
       String className) throws Exception {
+    String operatorName = tdesc[1];
     String operandType1 = tdesc[2];
     String operandType2 = tdesc[3];
     String outputColumnVectorType = this.getColumnVectorType(
@@ -1899,6 +2112,7 @@ private void generateScalarArithmeticOperatorColumn(String[] tdesc, String retur
     templateString = templateString.replaceAll("<ClassName>", className);
     templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
     templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
     templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
     templateString = templateString.replaceAll("<OperandType1>", operandType1);
     templateString = templateString.replaceAll("<OperandType2>", operandType2);
@@ -1907,12 +2121,17 @@ private void generateScalarArithmeticOperatorColumn(String[] tdesc, String retur
     writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
         className, templateString);
 
+    String testScalarType = operandType1;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
     testCodeGen.addColumnScalarOperationTestCases(
         false,
         className,
         inputColumnVectorType,
         outputColumnVectorType,
-        operandType1);
+        testScalarType);
   }
 
   //Binary arithmetic operator
@@ -2053,6 +2272,378 @@ private void generateDecimalColumnCompare(String[] tdesc, String className)
         className, templateString);
   }
 
+  // TODO: These can eventually be used to replace generateTimestampScalarCompareTimestampColumn()
+  private void generateDTIScalarCompareColumn(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = getCamelCaseType(operandType) + "Scalar" + operatorName
+        + getCamelCaseType(operandType) + "Column";
+    String baseClassName = "LongScalar" + operatorName + "LongColumn";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateFilterDTIScalarCompareColumn(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = "Filter" + getCamelCaseType(operandType) + "Scalar" + operatorName
+        + getCamelCaseType(operandType) + "Column";
+    String baseClassName = "FilterLongScalar" + operatorName + "LongColumn";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateDTIColumnCompareScalar(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = getCamelCaseType(operandType) + "Col" + operatorName
+        + getCamelCaseType(operandType) + "Scalar";
+    String baseClassName = "LongCol" + operatorName + "LongScalar";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateFilterDTIColumnCompareScalar(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType = tdesc[2];
+    String className = "Filter" + getCamelCaseType(operandType) + "Col" + operatorName
+        + getCamelCaseType(operandType) + "Scalar";
+    String baseClassName = "FilterLongCol" + operatorName + "LongScalar";
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperandType>", operandType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
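These four generators only splice a class name, a base class name, and an operand type into small
wrapper templates: since interval and date values are long-encoded, each generated comparison can
simply extend the corresponding Long variant. A plausible shape for one expansion, e.g.
("FilterDTIScalarCompareColumn", "Equal", "interval_year_month") — the actual template output
depends on FilterDTIScalarCompareColumn.txt and may also override descriptor metadata:

    // Hypothetical generated wrapper, shown for illustration only.
    public class FilterIntervalYearMonthScalarEqualIntervalYearMonthColumn
        extends FilterLongScalarEqualLongColumn {
      public FilterIntervalYearMonthScalarEqualIntervalYearMonthColumn(long value, int colNum) {
        super(value, colNum);
      }
      public FilterIntervalYearMonthScalarEqualIntervalYearMonthColumn() {
        super();
      }
    }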
+
+  private void generateColumnArithmeticColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion1 = tdesc[5];
+    String typeConversion2 = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
+    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
+    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    testCodeGen.addColumnColumnOperationTestCases(
+        className,
+        inputColumnVectorType1,
+        inputColumnVectorType2,
+        outputColumnVectorType);
+  }
+
+  private void generateScalarArithmeticColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion1 = tdesc[5];
+    String typeConversion2 = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Scalar" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(
+        returnType == null ? "long" : returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType2);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
+    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    String testScalarType = operandType1;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+        false,
+        className,
+        inputColumnVectorType,
+        outputColumnVectorType,
+        testScalarType);
+  }
+
+  private void generateColumnArithmeticScalarWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion1 = tdesc[5];
+    String typeConversion2 = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Scalar";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion1>", typeConversion1);
+    templateString = templateString.replaceAll("<TypeConversion2>", typeConversion2);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    String testScalarType = operandType2;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+        true,
+        className,
+        inputColumnVectorType,
+        outputColumnVectorType,
+        testScalarType);
+  }
+
+  private void generateDateTimeColumnArithmeticIntervalColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion = tdesc[5];
+    String operatorFunction = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType1>", inputColumnVectorType1);
+    templateString = templateString.replaceAll("<InputColumnVectorType2>", inputColumnVectorType2);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion>", typeConversion);
+    templateString = templateString.replaceAll("<OperatorFunction>", operatorFunction);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    testCodeGen.addColumnColumnOperationTestCases(
+        className,
+        inputColumnVectorType1,
+        inputColumnVectorType2,
+        outputColumnVectorType);
+  }
+
+  private void generateDateTimeScalarArithmeticIntervalColumnWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion = tdesc[5];
+    String operatorFunction = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Scalar" + operatorName + getCamelCaseType(operandType2) + "Column";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(
+        returnType == null ? "long" : returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType2);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion>", typeConversion);
+    templateString = templateString.replaceAll("<OperatorFunction>", operatorFunction);
+    templateString = templateString.replaceAll("<CamelReturnType>", getCamelCaseType(vectorReturnType));
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    String testScalarType = operandType1;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+        false,
+        className,
+        inputColumnVectorType,
+        outputColumnVectorType,
+        testScalarType);
+  }
+
+  private void generateDateTimeColumnArithmeticIntervalScalarWithConvert(String[] tdesc) throws Exception {
+    String operatorName = tdesc[1];
+    String operandType1 = tdesc[2];
+    String operandType2 = tdesc[3];
+    String operatorSymbol = tdesc[4];
+    String typeConversion = tdesc[5];
+    String operatorFunction = tdesc[6];
+    String className = getCamelCaseType(operandType1)
+        + "Col" + operatorName + getCamelCaseType(operandType2) + "Scalar";
+    String returnType = getArithmeticReturnType(operandType1, operandType2);
+    String outputColumnVectorType = this.getColumnVectorType(returnType);
+    String inputColumnVectorType = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType1 = this.getColumnVectorType(operandType1);
+    String inputColumnVectorType2 = this.getColumnVectorType(operandType2);
+    // For date/timestamp/interval, this should be "long"
+    String vectorOperandType1 = this.getVectorPrimitiveType(inputColumnVectorType1);
+    String vectorOperandType2 = this.getVectorPrimitiveType(inputColumnVectorType2);
+    String vectorReturnType = this.getVectorPrimitiveType(outputColumnVectorType);
+
+    //Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<InputColumnVectorType>", inputColumnVectorType);
+    templateString = templateString.replaceAll("<OutputColumnVectorType>", outputColumnVectorType);
+    templateString = templateString.replaceAll("<OperatorName>", operatorName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<OperandType1>", operandType1);
+    templateString = templateString.replaceAll("<OperandType2>", operandType2);
+    templateString = templateString.replaceAll("<ReturnType>", returnType);
+    templateString = templateString.replaceAll("<VectorOperandType1>", vectorOperandType1);
+    templateString = templateString.replaceAll("<VectorOperandType2>", vectorOperandType2);
+    templateString = templateString.replaceAll("<VectorReturnType>", vectorReturnType);
+    templateString = templateString.replaceAll("<TypeConversion>", typeConversion);
+    templateString = templateString.replaceAll("<OperatorFunction>", operatorFunction);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+
+    String testScalarType = operandType2;
+    if (isDateTimeIntervalType(testScalarType)) {
+      testScalarType = "long";
+    }
+
+    testCodeGen.addColumnScalarOperationTestCases(
+        true,
+        className,
+        inputColumnVectorType,
+        outputColumnVectorType,
+        testScalarType);
+  }
+
+  private static boolean isDateTimeIntervalType(String type) {
+    return (type.equals("date")
+        || type.equals("timestamp")
+        || type.equals("interval_year_month")
+        || type.equals("interval_day_time"));
+  }
+
   static void writeFile(long templateTime, String outputDir, String classesDir,
       String className, String str) throws IOException {
     File outputFile = new File(outputDir, className + ".java");
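The year-month generators delegate the actual math to a DateTimeMath instance — the
dtm.addMonthsToDays / dtm.addMonthsToNanosUtc strings in the expansion table — since adding months
is calendar arithmetic, not a fixed offset. Those helpers are not shown in this patch; a plausible
sketch of addMonthsToDays, following the local-time month arithmetic the deleted common
DateTimeMath used for java.sql.Date (DateWritable's day/millis converters are existing Hive
utilities; the method shape itself is assumed):

    public long addMonthsToDays(long days, long months) {
      long millis = DateWritable.daysToMillis((int) days);  // epoch days -> local millis
      calLocal.setTimeInMillis(millis);                     // calLocal: local-TZ Calendar field
      calLocal.add(Calendar.MONTH, (int) months);
      return DateWritable.millisToDays(calLocal.getTimeInMillis());
    }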
if (type.equals("timestamp")) { + return "Timestamp"; + } else if (type.equals("date")) { + return "Date"; } else { return type; } @@ -2111,20 +2710,60 @@ private static String getInitialCapWord(String word) { return firstLetterAsCap + word.substring(1); } + private static final String ARITHMETIC_RETURN_TYPES[][] = { + { "interval_year_month", "interval_year_month", "interval_year_month"}, + { "interval_year_month", "date", "date"}, + { "date", "interval_year_month", "date"}, + { "interval_year_month", "timestamp", "timestamp"}, + { "timestamp", "interval_year_month", "timestamp"}, + { "interval_day_time", "interval_day_time", "interval_day_time"}, + { "interval_day_time", "date", "timestamp"}, + { "date", "interval_day_time", "timestamp"}, + { "interval_day_time", "timestamp", "timestamp"}, + { "timestamp", "interval_day_time", "timestamp"}, + { "date", "date", "interval_day_time"}, + { "timestamp", "timestamp", "interval_day_time"}, + { "timestamp", "date", "interval_day_time"}, + { "date", "timestamp", "interval_day_time"}, + { "*", "double", "double"}, + { "double", "*", "double"}, + }; + private String getArithmeticReturnType(String operandType1, String operandType2) { +/* if (operandType1.equals("double") || operandType2.equals("double")) { return "double"; + } else if (operandType1.equals("interval_year_month") && + operandType2.equals("interval_year_month")) { + return "interval_year_month"; + } else if (operandType1.equals("interval_year_month") && + operandType2.equals("date")) { + return "date"; + } else if (operandType1.equals("date") && + operandType2.equals("interval_year_month")) { + return "date"; + } else if (operandType1.equals("interval_day_time") && + operandType2.equals("interval_day_time")) { + return "interval_day_time"; } else { return "long"; } +*/ + for (String[] combination : ARITHMETIC_RETURN_TYPES) { + if ((combination[0].equals("*") || combination[0].equals(operandType1)) && + (combination[1].equals("*") || combination[1].equals(operandType2))) { + return combination[2]; + } + } + return "long"; } private String getColumnVectorType(String primitiveType) throws Exception { if(primitiveType.equals("double")) { return "DoubleColumnVector"; - } else if (primitiveType.equals("long")) { + } else if (primitiveType.equals("long") || isDateTimeIntervalType(primitiveType)) { return "LongColumnVector"; } else if (primitiveType.equals("decimal")) { return "DecimalColumnVector"; @@ -2134,6 +2773,19 @@ private String getColumnVectorType(String primitiveType) throws Exception { throw new Exception("Unimplemented primitive column vector type: " + primitiveType); } + private String getVectorPrimitiveType(String columnVectorType) throws Exception { + if (columnVectorType.equals("LongColumnVector")) { + return "long"; + } else if (columnVectorType.equals("double")) { + return "double"; + } else if (columnVectorType.equals("DecimalColumnVector")) { + return "decimal"; + } else if (columnVectorType.equals("BytesColumnVector")) { + return "string"; + } + throw new Exception("Could not determine primitive type for column vector type: " + columnVectorType); + } + private String getOutputWritableType(String primitiveType) throws Exception { if (primitiveType.equals("long")) { return "LongWritable"; @@ -2141,6 +2793,14 @@ private String getOutputWritableType(String primitiveType) throws Exception { return "DoubleWritable"; } else if (primitiveType.equals("decimal")) { return "HiveDecimalWritable"; + } else if (primitiveType.equals("interval_year_month")) { + return 
"HiveIntervalYearMonthWritable"; + } else if (primitiveType.equals("interval_day_time")) { + return "HiveIntervalDayTimeWritable"; + } else if (primitiveType.equals("date")) { + return "HiveDateWritable"; + } else if (primitiveType.equals("timestamp")) { + return "HiveTimestampWritable"; } throw new Exception("Unimplemented primitive output writable: " + primitiveType); } @@ -2152,6 +2812,14 @@ private String getOutputObjectInspector(String primitiveType) throws Exception { return "PrimitiveObjectInspectorFactory.writableDoubleObjectInspector"; } else if (primitiveType.equals("decimal")) { return "PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector"; + } else if (primitiveType.equals("interval_year_month")) { + return "PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector"; + } else if (primitiveType.equals("interval_day_time")) { + return "PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector"; + } else if (primitiveType.equals("date")) { + return "PrimitiveObjectInspectorFactory.writableDateObjectInspector"; + } else if (primitiveType.equals("timestamp")) { + return "PrimitiveObjectInspectorFactory.writableTimestampObjectInspector"; } throw new Exception("Unimplemented primitive output inspector: " + primitiveType); } diff --git a/common/src/java/org/apache/hive/common/util/DateTimeMath.java b/common/src/java/org/apache/hive/common/util/DateTimeMath.java deleted file mode 100644 index 28030e6..0000000 --- a/common/src/java/org/apache/hive/common/util/DateTimeMath.java +++ /dev/null @@ -1,195 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hive.common.util; - -import java.sql.Date; -import java.sql.Timestamp; -import java.util.Calendar; -import java.util.TimeZone; -import java.util.concurrent.TimeUnit; - -import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; -import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; - - -public class DateTimeMath { - - private static class NanosResult { - public int seconds; - public int nanos; - - public void addNanos(int leftNanos, int rightNanos) { - seconds = 0; - nanos = leftNanos + rightNanos; - if (nanos < 0) { - seconds = -1; - nanos += DateUtils.NANOS_PER_SEC; - } else if (nanos >= DateUtils.NANOS_PER_SEC) { - seconds = 1; - nanos -= DateUtils.NANOS_PER_SEC; - } - } - } - - protected Calendar calUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC")); - protected Calendar calLocal = Calendar.getInstance(); - protected NanosResult nanosResult = new NanosResult(); - - // - // Operations involving/returning year-month intervals - // - - /** - * Perform month arithmetic to millis value using UTC time zone. 
- * @param millis - * @param months - * @return - */ - public long addMonthsToMillisUtc(long millis, int months) { - calUtc.setTimeInMillis(millis); - calUtc.add(Calendar.MONTH, months); - return calUtc.getTimeInMillis(); - } - - /** - * Perform month arithmetic to millis value using local time zone. - * @param millis - * @param months - * @return - */ - public long addMonthsToMillisLocal(long millis, int months) { - calLocal.setTimeInMillis(millis); - calLocal.add(Calendar.MONTH, months); - return calLocal.getTimeInMillis(); - } - - public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) { - if (ts == null || interval == null) { - return null; - } - - // Attempt to match Oracle semantics for timestamp arithmetic, - // where timestamp arithmetic is done in UTC, then converted back to local timezone - long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths()); - Timestamp tsResult = new Timestamp(resultMillis); - tsResult.setNanos(ts.getNanos()); - - return tsResult; - } - - public Date add(Date dt, HiveIntervalYearMonth interval) { - if (dt == null || interval == null) { - return null; - } - - // Since Date millis value is in local timezone representation, do date arithmetic - // using local timezone so the time remains at the start of the day. - long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths()); - return new Date(resultMillis); - } - - public HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) { - HiveIntervalYearMonth result = null; - if (left == null || right == null) { - return null; - } - - result = new HiveIntervalYearMonth(left.getTotalMonths() + right.getTotalMonths()); - return result; - } - - public Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) { - if (left == null || right == null) { - return null; - } - return add(left, right.negate()); - } - - public Date subtract(Date left, HiveIntervalYearMonth right) { - if (left == null || right == null) { - return null; - } - return add(left, right.negate()); - } - - public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) { - if (left == null || right == null) { - return null; - } - return add(left, right.negate()); - } - - // - // Operations involving/returning day-time intervals - // - - public Timestamp add(Timestamp ts, HiveIntervalDayTime interval) { - if (ts == null || interval == null) { - return null; - } - - nanosResult.addNanos(ts.getNanos(), interval.getNanos()); - - long newMillis = ts.getTime() - + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds); - Timestamp tsResult = new Timestamp(newMillis); - tsResult.setNanos(nanosResult.nanos); - return tsResult; - } - - public HiveIntervalDayTime add(HiveIntervalDayTime left, HiveIntervalDayTime right) { - HiveIntervalDayTime result = null; - if (left == null || right == null) { - return null; - } - - nanosResult.addNanos(left.getNanos(), right.getNanos()); - - long totalSeconds = left.getTotalSeconds() + right.getTotalSeconds() + nanosResult.seconds; - result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos); - return result; - } - - public Timestamp subtract(Timestamp left, HiveIntervalDayTime right) { - if (left == null || right == null) { - return null; - } - return add(left, right.negate()); - } - - public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) { - if (left == null || right == null) { - return null; - } - return add(left, right.negate()); - } - - 
public HiveIntervalDayTime subtract(Timestamp left, Timestamp right) { - HiveIntervalDayTime result = null; - if (left == null || right == null) { - return null; - } - - nanosResult.addNanos(left.getNanos(), -(right.getNanos())); - - long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime()) - - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds; - result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos); - return result; - } -} diff --git a/common/src/java/org/apache/hive/common/util/DateUtils.java b/common/src/java/org/apache/hive/common/util/DateUtils.java index b4159d3..454a6c8 100644 --- a/common/src/java/org/apache/hive/common/util/DateUtils.java +++ b/common/src/java/org/apache/hive/common/util/DateUtils.java @@ -21,6 +21,8 @@ import java.math.BigDecimal; import java.text.SimpleDateFormat; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; + /** * DateUtils. Thread-safe class * @@ -54,4 +56,14 @@ public static int parseNumericValueWithRange(String fieldName, } return result; } -} \ No newline at end of file + + public static long getIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime) { + return intervalDayTime.getTotalSeconds() * NANOS_PER_SEC + intervalDayTime.getNanos(); + } + + public static void setIntervalDayTimeTotalNanos(HiveIntervalDayTime intervalDayTime, + long totalNanos) { + intervalDayTime.set(totalNanos / NANOS_PER_SEC, (int) (totalNanos % NANOS_PER_SEC)); + } +} + diff --git a/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java b/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java deleted file mode 100644 index 4886576..0000000 --- a/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java +++ /dev/null @@ -1,463 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
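The two new DateUtils helpers define the long encoding used for interval_day_time throughout the
vectorized expressions: total seconds scaled to nanoseconds, plus the sub-second nanos. A
round-trip sketch (the (days, hours, minutes, seconds, nanos) constructor is HiveIntervalDayTime's
existing one):

    HiveIntervalDayTime in = new HiveIntervalDayTime(1, 2, 3, 4, 500);
    long totalNanos = DateUtils.getIntervalDayTimeTotalNanos(in);
    // totalNanos == 93784 * 1_000_000_000L + 500 (1d 2h 3m 4s = 93784 seconds)

    HiveIntervalDayTime out = new HiveIntervalDayTime(0, 0, 0, 0, 0);
    DateUtils.setIntervalDayTimeTotalNanos(out, totalNanos);
    // out now denotes the same interval as in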
diff --git a/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java b/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java
deleted file mode 100644
index 4886576..0000000
--- a/common/src/test/org/apache/hive/common/util/TestDateTimeMath.java
+++ /dev/null
@@ -1,463 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.common.util;
-
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.util.TimeZone;
-
-import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.junit.*;
-
-import static org.junit.Assert.*;
-
-public class TestDateTimeMath {
-
-  @Test
-  public void testTimestampIntervalYearMonthArithmetic() throws Exception {
-    char plus = '+';
-    char minus = '-';
-
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-0",
-        "2001-01-01 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "1-1",
-        "2002-02-01 01:02:03.456");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "10-0",
-        "2011-01-01 01:02:03.456");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "0-11",
-        "2001-12-01 01:02:03.456");
-    checkTimestampIntervalYearMonthArithmetic("2001-03-01 01:02:03.500", plus, "1-11",
-        "2003-02-01 01:02:03.500");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-1-1",
-        "1999-12-01 01:02:03.500");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-0-0",
-        "2001-01-01 01:02:03.500");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", plus, "-0-0",
-        "2001-01-01 01:02:03.123456789");
-
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "0-0",
-        "2001-01-01 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "10-0",
-        "1991-01-01 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-10-0",
-        "2011-01-01 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "8-2",
-        "1992-11-01 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-8-2",
-        "2009-03-01 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", minus, "8-2",
-        "1992-11-01 01:02:03.123456789");
-
-    checkTimestampIntervalYearMonthArithmetic(null, plus, "1-1",
-        null);
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, null,
-        null);
-    checkTimestampIntervalYearMonthArithmetic(null, minus, "1-1",
-        null);
-    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, null,
-        null);
-
-    // End of the month behavior
-    checkTimestampIntervalYearMonthArithmetic("2001-01-28 01:02:03", plus, "0-1",
-        "2001-02-28 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-29 01:02:03", plus, "0-1",
-        "2001-02-28 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-30 01:02:03", plus, "0-1",
-        "2001-02-28 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-01-31 01:02:03", plus, "0-1",
-        "2001-02-28 01:02:03");
-    checkTimestampIntervalYearMonthArithmetic("2001-02-28 01:02:03", plus, "0-1",
-        "2001-03-28 01:02:03");
-
-    // Test that timestamp arithmetic is done in UTC and then converted back to local timezone,
-    // matching Oracle behavior.
-    TimeZone originalTz = TimeZone.getDefault();
-    try {
-      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
-      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
-          "2001-07-01 02:02:03");
-      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
-          "2002-01-01 00:02:03");
-
-      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
-      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
-          "2001-07-01 01:02:03");
-      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
-          "2002-01-01 01:02:03");
-    } finally {
-      TimeZone.setDefault(originalTz);
-    }
-  }
-
-  @Test
-  public void testDateIntervalYearMonthArithmetic() throws Exception {
-    char plus = '+';
-    char minus = '-';
-
-    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "0-0", "2001-01-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "0-1", "2001-02-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "0-6", "2001-07-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "1-0", "2002-01-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", plus, "1-1", "2002-02-01");
-    checkDateIntervalDayTimeArithmetic("2001-10-10", plus, "1-6", "2003-04-10");
-    checkDateIntervalDayTimeArithmetic("2003-04-10", plus, "-1-6", "2001-10-10");
-
-    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "0-0", "2001-01-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "0-1", "2000-12-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "1-0", "2000-01-01");
-    checkDateIntervalDayTimeArithmetic("2001-01-01", minus, "1-1", "1999-12-01");
-    checkDateIntervalDayTimeArithmetic("2001-10-10", minus, "1-6", "2000-04-10");
-    checkDateIntervalDayTimeArithmetic("2003-04-10", minus, "-1-6", "2004-10-10");
-
-    // end of month behavior
-    checkDateIntervalDayTimeArithmetic("2001-01-28", plus, "0-1", "2001-02-28");
-    checkDateIntervalDayTimeArithmetic("2001-01-29", plus, "0-1", "2001-02-28");
-    checkDateIntervalDayTimeArithmetic("2001-01-30", plus, "0-1", "2001-02-28");
-    checkDateIntervalDayTimeArithmetic("2001-01-31", plus, "0-1", "2001-02-28");
-    checkDateIntervalDayTimeArithmetic("2001-01-31", plus, "0-2", "2001-03-31");
-    checkDateIntervalDayTimeArithmetic("2001-02-28", plus, "0-1", "2001-03-28");
-    // leap year
-    checkDateIntervalDayTimeArithmetic("2004-01-28", plus, "0-1", "2004-02-28");
-    checkDateIntervalDayTimeArithmetic("2004-01-29", plus, "0-1", "2004-02-29");
-    checkDateIntervalDayTimeArithmetic("2004-01-30", plus, "0-1", "2004-02-29");
-    checkDateIntervalDayTimeArithmetic("2004-01-31", plus, "0-1", "2004-02-29");
-  }
-
-  @Test
-  public void testIntervalYearMonthArithmetic() throws Exception {
-    char plus = '+';
-    char minus = '-';
-
-    checkIntervalYearMonthArithmetic("0-0", plus, "0-0", "0-0");
-    checkIntervalYearMonthArithmetic("0-0", plus, "4-5", "4-5");
-    checkIntervalYearMonthArithmetic("4-5", plus, "0-0", "4-5");
-    checkIntervalYearMonthArithmetic("0-0", plus, "1-1", "1-1");
-    checkIntervalYearMonthArithmetic("1-1", plus, "0-0", "1-1");
-
-    checkIntervalYearMonthArithmetic("0-0", minus, "0-0", "0-0");
-    checkIntervalYearMonthArithmetic("0-0", minus, "1-0", "-1-0");
-    checkIntervalYearMonthArithmetic("1-2", minus, "1-1", "0-1");
-    checkIntervalYearMonthArithmetic("0-0", minus, "1-1", "-1-1");
-    checkIntervalYearMonthArithmetic("-1-1", minus, "1-1", "-2-2");
-    checkIntervalYearMonthArithmetic("-1-1", minus, "-1-1", "0-0");
-
-    checkIntervalYearMonthArithmetic(null, plus, "1-1", null);
-    checkIntervalYearMonthArithmetic("1-1", plus, null, null);
-    checkIntervalYearMonthArithmetic(null, minus, "1-1", null);
-    checkIntervalYearMonthArithmetic("1-1", minus, null, null);
-  }
-
-  @Test
-  public void testTimestampIntervalDayTimeArithmetic() throws Exception {
-    char plus = '+';
-    char minus = '-';
-
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1",
-        "2001-01-02 02:03:04");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1",
-        "2001-01-02 02:03:04.456");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555",
-        "2001-01-02 02:03:05.011");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1.555555555",
-        "2001-01-02 02:03:04.555555555");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555555555",
-        "2001-01-02 02:03:05.011555555");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.499",
-        "2001-01-02 02:03:04.999");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.500",
-        "2001-01-02 02:03:05.0");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.501",
-        "2001-01-02 02:03:05.001");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.4999999999",
-        "2001-01-02 02:03:04.999999999");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500",
-        "2001-01-02 02:03:05.0");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500000001",
-        "2001-01-02 02:03:05.000000001");
-
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 01:02:03",
-        "2001-01-01 00:00:00");
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 0:0:0",
-        "2001-01-01 01:02:03");
-
-    checkTsIntervalDayTimeArithmetic(null, plus, "1 1:1:1.555555555",
-        null);
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, null,
-        null);
-    checkTsIntervalDayTimeArithmetic(null, minus, "1 1:1:1.555555555",
-        null);
-    checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, null,
-        null);
-
-    // Try some time zone boundaries
-    TimeZone originalTz = TimeZone.getDefault();
-    try {
-      // America/Los_Angeles DST dates - 2015-03-08 02:00:00/2015-11-01 02:00:00
-      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
-
-      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
-          "2015-03-08 01:59:59");
-      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
-          "2015-03-08 03:00:00");
-      checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:01",
-          "2015-03-08 01:59:59");
-      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.005",
-          "2015-03-08 03:00:00");
-      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.0051",
-          "2015-03-08 03:00:00.0001");
-      checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:0.005",
-          "2015-03-08 01:59:59.995");
-      checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:58", plus, "0 0:0:01",
-          "2015-11-01 01:59:59");
-      checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:59", plus, "0 0:0:01",
-          "2015-11-01 02:00:00");
-
-      // UTC has no such adjustment
-      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
-      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01",
-          "2015-03-08 01:59:59");
-      checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01",
-          "2015-03-08 02:00:00");
-    } finally {
-      TimeZone.setDefault(originalTz);
-    }
-  }
- - @Test - public void testIntervalDayTimeArithmetic() throws Exception { - char plus = '+'; - char minus = '-'; - - checkIntervalDayTimeArithmetic("0 0:0:0", plus, "0 0:0:0", "0 0:0:0"); - checkIntervalDayTimeArithmetic("0 01:02:03", plus, "6 0:0:0.0001", "6 01:02:03.0001"); - checkIntervalDayTimeArithmetic("6 0:0:0.0001", plus, "0 01:02:03", "6 01:02:03.0001"); - checkIntervalDayTimeArithmetic("0 01:02:03", plus, "1 10:10:10.0001", "1 11:12:13.0001"); - checkIntervalDayTimeArithmetic("1 10:10:10.0001", plus, "0 01:02:03", "1 11:12:13.0001"); - checkIntervalDayTimeArithmetic("0 0:0:0.900000000", plus, "0 0:0:0.099999999", "0 0:0:0.999999999"); - checkIntervalDayTimeArithmetic("0 0:0:0.900000001", plus, "0 0:0:0.099999999", "0 0:0:1"); - checkIntervalDayTimeArithmetic("0 0:0:0.900000002", plus, "0 0:0:0.099999999", "0 0:0:1.000000001"); - - checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0", "0 0:0:0"); - checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0.123", "-0 0:0:0.123"); - checkIntervalDayTimeArithmetic("3 4:5:6.789", minus, "1 1:1:1.111", "2 3:4:5.678"); - checkIntervalDayTimeArithmetic("0 0:0:0.0", minus, "1 1:1:1.111", "-1 1:1:1.111"); - checkIntervalDayTimeArithmetic("-1 1:1:1.222", minus, "1 1:1:1.111", "-2 2:2:2.333"); - checkIntervalDayTimeArithmetic("-1 1:1:1.111", minus, "-1 1:1:1.111", "0 0:0:0"); - - checkIntervalDayTimeArithmetic(null, plus, "1 1:1:1.111", null); - checkIntervalDayTimeArithmetic("1 1:1:1.111", plus, null, null); - checkIntervalDayTimeArithmetic(null, minus, "1 1:1:1.111", null); - checkIntervalDayTimeArithmetic("1 1:1:1.111", minus, null, null); - } - - @Test - public void testTimestampSubtraction() throws Exception { - checkTsArithmetic("2001-01-01 00:00:00", "2001-01-01 00:00:00", "0 0:0:0"); - checkTsArithmetic("2002-02-02 01:01:01", "2001-01-01 00:00:00", "397 1:1:1"); - checkTsArithmetic("2001-01-01 00:00:00", "2002-02-02 01:01:01", "-397 1:1:1"); - checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 00:00:00", "1 0:0:0"); - checkTsArithmetic("2014-12-31 00:00:00", "2015-01-01 00:00:00", "-1 0:0:0"); - checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59", "0 0:0:01"); - checkTsArithmetic("2014-12-31 23:59:59", "2015-01-01 00:00:00", "-0 0:0:01"); - checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59.9999", "0 0:0:00.0001"); - checkTsArithmetic("2014-12-31 23:59:59.9999", "2015-01-01 00:00:00", "-0 0:0:00.0001"); - checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 11:12:13.000000001", "0 12:47:46.999999999"); - checkTsArithmetic("2014-12-31 11:12:13.000000001", "2015-01-01 00:00:00", "-0 12:47:46.999999999"); - - // Test that timestamp arithmetic is done in UTC and then converted back to local timezone, - // matching Oracle behavior. 
- TimeZone originalTz = TimeZone.getDefault(); - try { - TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); - checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 1:0:0"); - checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 1:0:0"); - checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 1:0:0"); - checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 1:0:0"); - - TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 0:0:0"); - checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 0:0:0"); - checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 0:0:0"); - checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 0:0:0"); - } finally { - TimeZone.setDefault(originalTz); - } - } - - private static void checkTimestampIntervalYearMonthArithmetic( - String left, char operationType, String right, String expected) throws Exception { - Timestamp leftTs = null; - if (left != null) { - leftTs = Timestamp.valueOf(left); - } - HiveIntervalYearMonth rightInterval = null; - if (right != null) { - rightInterval = HiveIntervalYearMonth.valueOf(right); - } - Timestamp expectedResult = null; - if (expected != null) { - expectedResult = Timestamp.valueOf(expected); - } - Timestamp testResult = null; - - DateTimeMath dtm = new DateTimeMath(); - switch (operationType) { - case '-': - testResult = dtm.subtract(leftTs, rightInterval); - break; - case '+': - testResult = dtm.add(leftTs, rightInterval); - break; - default: - throw new IllegalArgumentException("Invalid operation " + operationType); - } - - assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval), - expectedResult, testResult); - } - - private static void checkDateIntervalDayTimeArithmetic( - String left, char operationType, String right, String expected) throws Exception { - Date leftDt = null; - if (left != null) { - leftDt = Date.valueOf(left); - } - HiveIntervalYearMonth rightInterval = null; - if (right != null) { - rightInterval = HiveIntervalYearMonth.valueOf(right); - } - Date expectedResult = null; - if (expected != null) { - expectedResult = Date.valueOf(expected); - } - Date testResult = null; - - DateTimeMath dtm = new DateTimeMath(); - switch (operationType) { - case '-': - testResult = dtm.subtract(leftDt, rightInterval); - break; - case '+': - testResult = dtm.add(leftDt, rightInterval); - break; - default: - throw new IllegalArgumentException("Invalid operation " + operationType); - } - - assertEquals(String.format("%s %s %s", leftDt, operationType, rightInterval), - expectedResult, testResult); - } - - private static void checkIntervalYearMonthArithmetic( - String left, char operationType, String right, String expected) throws Exception { - HiveIntervalYearMonth leftInterval = left == null ? null: HiveIntervalYearMonth.valueOf(left); - HiveIntervalYearMonth rightInterval = right == null ? null : HiveIntervalYearMonth.valueOf(right); - HiveIntervalYearMonth expectedResult = expected == null ? 
null : HiveIntervalYearMonth.valueOf(expected); - HiveIntervalYearMonth testResult = null; - - DateTimeMath dtm = new DateTimeMath(); - switch (operationType) { - case '-': - testResult = dtm.subtract(leftInterval, rightInterval); - break; - case '+': - testResult = dtm.add(leftInterval, rightInterval); - break; - default: - throw new IllegalArgumentException("Invalid operation " + operationType); - } - - assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval), - expectedResult, testResult); - } - - private static void checkTsIntervalDayTimeArithmetic( - String left, char operationType, String right, String expected) throws Exception { - Timestamp leftTs = null; - if (left != null) { - leftTs = Timestamp.valueOf(left); - } - HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right); - Timestamp expectedResult = null; - if (expected != null) { - expectedResult = Timestamp.valueOf(expected); - } - Timestamp testResult = null; - - DateTimeMath dtm = new DateTimeMath(); - switch (operationType) { - case '-': - testResult = dtm.subtract(leftTs, rightInterval); - break; - case '+': - testResult = dtm.add(leftTs, rightInterval); - break; - default: - throw new IllegalArgumentException("Invalid operation " + operationType); - } - - assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval), - expectedResult, testResult); - } - - private static void checkIntervalDayTimeArithmetic( - String left, char operationType, String right, String expected) throws Exception { - HiveIntervalDayTime leftInterval = left == null ? null : HiveIntervalDayTime.valueOf(left); - HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right); - HiveIntervalDayTime expectedResult = expected == null ? null : HiveIntervalDayTime.valueOf(expected); - HiveIntervalDayTime testResult = null; - - DateTimeMath dtm = new DateTimeMath(); - switch (operationType) { - case '-': - testResult = dtm.subtract(leftInterval, rightInterval); - break; - case '+': - testResult = dtm.add(leftInterval, rightInterval); - break; - default: - throw new IllegalArgumentException("Invalid operation " + operationType); - } - - assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval), - expectedResult, testResult); - } - - private static void checkTsArithmetic( - String left, String right, String expected) throws Exception { - Timestamp leftTs = null; - if (left != null) { - leftTs = Timestamp.valueOf(left); - } - Timestamp rightTs = null; - if (left != null) { - rightTs = Timestamp.valueOf(right); - } - HiveIntervalDayTime expectedResult = null; - if (expected != null) { - expectedResult = HiveIntervalDayTime.valueOf(expected); - } - DateTimeMath dtm = new DateTimeMath(); - HiveIntervalDayTime testResult = - dtm.subtract(leftTs, rightTs); - - assertEquals(String.format("%s - %s", leftTs, rightTs), - expectedResult, testResult); - } -} diff --git a/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt new file mode 100644 index 0000000..bcd10a2 --- /dev/null +++ b/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticColumnWithConvert.txt @@ -0,0 +1,173 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template ColumnArithmeticColumnWithConvert.txt, which covers binary arithmetic
+ * expressions between columns.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    <OperandType1>[] vector1 = inputColVector1.vector;
+    <OperandType2>[] vector2 = inputColVector2.vector;
+    <ReturnType>[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+        inputColVector1.isRepeating && inputColVector2.isRepeating
+        || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+        || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+        inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      outputVector[0] = <TypeConversion1>(vector1[0]) <OperatorSymbol> <TypeConversion2>(vector2[0]);
+    } else if (inputColVector1.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector1[0]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector1[0]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[0]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[0]);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector1[i]) <OperatorSymbol> <TypeConversion2>(vector2[i]);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  public int getColNum1() {
+    return colNum1;
+  }
+
+  public void setColNum1(int colNum1) {
+    this.colNum1 = colNum1;
+  }
+
+  public int getColNum2() {
+    return colNum2;
+  }
+
+  public void setColNum2(int colNum2) {
+    this.colNum2 = colNum2;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
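For orientation, here is a sketch of what the inner loop above could look like once GenVectorCode substitutes the placeholders. The substitutions shown are illustrative assumptions for a date-minus-timestamp expansion, where only the date side needs converting from epoch days to nanoseconds:

    // Hypothetical expansion sketch (not part of the patch): <TypeConversion1> filled
    // with TimestampUtils.daysToNanoseconds, <TypeConversion2> left empty, and
    // <OperatorSymbol> replaced by "-".
    for (int i = 0; i != n; i++) {
      outputVector[i] = TimestampUtils.daysToNanoseconds(vector1[i]) - (vector2[i]);
    }
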
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt
new file mode 100644
index 0000000..105eb92
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/ColumnArithmeticScalarWithConvert.txt
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+/**
+ * Generated from template ColumnArithmeticScalarWithConvert.txt, which covers binary arithmetic
+ * expressions between a column and a scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType2> value;
+  private int outputColumn;
+
+  public <ClassName>(int colNum, <OperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = <TypeConversion2>(value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    outputColVector.isRepeating = inputColVector.isRepeating;
+    int n = batch.size;
+    <OperandType1>[] vector = inputColVector.vector;
+    <ReturnType>[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      outputVector[0] = <TypeConversion1>(vector[0]) <OperatorSymbol> value;
+
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion1>(vector[i]) <OperatorSymbol> value;
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public <OperandType2> getValue() {
+    return value;
+  }
+
+  public void setValue(<OperandType2> value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt b/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt
index 6bf6def..b52b7c7 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/ColumnUnaryMinus.txt
@@ -130,7 +130,7 @@ public class <ClassName> extends VectorExpression {
         VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType>"))
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
   }
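The one-line ColumnUnaryMinus change above matters for the new types: interval and datetime values travel in long vectors, so a placeholder resolving to the physical operand type would advertise "long" in the descriptor, while the argument-type placeholder preserves the logical type that expression matching works on. The placeholder names in that hunk are reconstructed assumptions; the substituted result for a year-month interval negation would look roughly like:

    // Illustrative substitution (reconstructed): the descriptor advertises the
    // logical type "interval_year_month" rather than the storage type "long".
    .setArgumentTypes(
        VectorExpressionDescriptor.ArgumentType.getType("interval_year_month"))
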
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
new file mode 100644
index 0000000..f2ec645
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIColumnNoConvert.txt
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.io.LongWritable;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template DTIColumnArithmeticDTIColumnNoConvert.txt, which covers arithmetic
+ * expressions between a datetime/interval column and a datetime/interval column.
+ * No type conversion is needed; the operations can be performed on the vectorized long values.
+ */
+public class <ClassName> extends LongCol<OperatorName>LongColumn {
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    super(colNum1, colNum2, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
new file mode 100644
index 0000000..1a360b8
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnArithmeticDTIScalarNoConvert.txt
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+
+/**
+ * Generated from template DTIColumnArithmeticDTIScalarNoConvert.txt, which covers arithmetic
+ * expressions between a datetime/interval column and a datetime/interval scalar.
+ * No type conversion is needed; the operations can be performed on the vectorized long values.
+ */
+public class <ClassName> extends LongCol<OperatorName>LongScalar {
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    super(colNum, value, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
+
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
new file mode 100644
index 0000000..9d692cb
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIColumnCompareScalar.txt
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+
+/**
+ * Generated from template DTIColumnCompareScalar.txt, which covers comparison
+ * expressions between a datetime/interval column and a scalar of the same type. The boolean output
+ * is stored in a separate boolean column.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(int colNum, long value, int outputColumn) {
+    super(colNum, value, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
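Because year-month intervals are just month counts carried in a LongColumnVector, the NoConvert templates above can inherit the entire arithmetic kernel from the existing generated long-column classes and specialize only the descriptor. A hypothetical expansion (the generated class name is an assumption, not taken from the patch):

    // Hypothetical expansion sketch for an Add/interval_year_month/interval_year_month
    // row: all vectorized add logic is inherited from the generated long-column class.
    public class IntervalYearMonthColAddIntervalYearMonthColumn extends LongColAddLongColumn {
      public IntervalYearMonthColAddIntervalYearMonthColumn(int colNum1, int colNum2, int outputColumn) {
        super(colNum1, colNum2, outputColumn);
      }
    }
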
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
new file mode 100644
index 0000000..753ea71
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarArithmeticDTIColumnNoConvert.txt
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.io.LongWritable;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template DTIScalarArithmeticDTIColumnNoConvert.txt, which covers arithmetic
+ * expressions between a datetime/interval scalar and a datetime/interval column.
+ * No type conversion is needed; the operations can be performed on the vectorized long values.
+ */
+public class <ClassName> extends LongScalar<OperatorName>LongColumn {
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    super(value, colNum, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
new file mode 100644
index 0000000..fdd453a
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DTIScalarCompareColumn.txt
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.udf.UDFToString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.io.LongWritable;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template DTIScalarCompareColumn.txt, which covers comparison
+ * expressions between a datetime/interval scalar and a column of the same type. The boolean output
+ * is stored in a separate boolean column.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(long value, int colNum, int outputColumn) {
+    super(value, colNum, outputColumn);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
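Since each of these generated classes differs only in its descriptor, it helps to see how a descriptor is consumed. A simplified sketch of the matching side follows; this is an assumption about how the vectorizer uses these descriptors, not code from this patch:

    // Sketch (assumed matching logic, simplified): the vectorizer builds the
    // descriptor for the expression shape it needs and compares it against each
    // candidate's getDescriptor() - mode, arity, argument types, and the
    // column/scalar shape of each input must all line up.
    VectorExpressionDescriptor.Descriptor wanted = (new VectorExpressionDescriptor.Builder())
        .setMode(VectorExpressionDescriptor.Mode.PROJECTION)
        .setNumArguments(2)
        .setArgumentTypes(
            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"),
            VectorExpressionDescriptor.ArgumentType.getType("interval_day_time"))
        .setInputExpressionTypes(
            VectorExpressionDescriptor.InputExpressionType.SCALAR,
            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
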
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt
new file mode 100644
index 0000000..cd7a1e7
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalColumnWithConvert.txt
@@ -0,0 +1,175 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template DateTimeColumnArithmeticIntervalColumnWithConvert.txt, which covers
+ * binary arithmetic expressions between a datetime column and an interval column.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    int n = batch.size;
+    <OperandType1>[] vector1 = inputColVector1.vector;
+    <OperandType2>[] vector2 = inputColVector2.vector;
+    <ReturnType>[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+        inputColVector1.isRepeating && inputColVector2.isRepeating
+        || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+        || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+        inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      outputVector[0] = <TypeConversion2>(<TypeConversion1>(vector1[0]), <OperatorSymbol> (int) vector2[0]);
+    } else if (inputColVector1.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector1[0]), <OperatorSymbol> (int) vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector1[0]), <OperatorSymbol> (int) vector2[i]);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector1[i]), <OperatorSymbol> (int) vector2[0]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector1[i]), <OperatorSymbol> (int) vector2[0]);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector1[i]), <OperatorSymbol> (int) vector2[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector1[i]), <OperatorSymbol> (int) vector2[i]);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  public int getColNum1() {
+    return colNum1;
+  }
+
+  public void setColNum1(int colNum1) {
+    this.colNum1 = colNum1;
+  }
+
+  public int getColNum2() {
+    return colNum2;
+  }
+
+  public void setColNum2(int colNum2) {
+    this.colNum2 = colNum2;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
+
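The generator pairs each Subtract expansion with the same DateTimeMath helper as Add, so subtraction compiles down to adding a negated month count. A hypothetical expansion of the inner loop above, assuming a date-minus-interval_year_month row whose helper is dtm.addMonthsToDays and whose operator symbol is "-":

    // Hypothetical expansion sketch: <TypeConversion2> = dtm.addMonthsToDays,
    // <TypeConversion1> empty, <OperatorSymbol> = "-"; subtracting an interval
    // is expressed as adding the negated month count.
    outputVector[i] = dtm.addMonthsToDays((vector1[i]), - (int) vector2[i]);
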
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt
new file mode 100644
index 0000000..abee249
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeColumnArithmeticIntervalScalarWithConvert.txt
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template DateTimeColumnArithmeticIntervalScalarWithConvert.txt, which covers
+ * binary arithmetic expressions between a datetime column and an interval scalar.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType2> value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, <OperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    outputColVector.isRepeating = inputColVector.isRepeating;
+    int n = batch.size;
+    <OperandType1>[] vector = inputColVector.vector;
+    <ReturnType>[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      outputVector[0] = <TypeConversion2>(<TypeConversion1>(vector[0]), <OperatorSymbol> (int) value);
+
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector[i]), <OperatorSymbol> (int) value);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector[i]), <OperatorSymbol> (int) value);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector[i]), <OperatorSymbol> (int) value);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(<TypeConversion1>(vector[i]), <OperatorSymbol> (int) value);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public <OperandType2> getValue() {
+    return value;
+  }
+
+  public void setValue(<OperandType2> value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt
new file mode 100644
index 0000000..93a441a
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateTimeScalarArithmeticIntervalColumnWithConvert.txt
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+
+/**
+ * Generated from template DateTimeScalarArithmeticIntervalColumnWithConvert.txt.
+ * Implements a vectorized arithmetic operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum;
+  private <OperandType1> value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(<OperandType1> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = <TypeConversion1>(value);
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @batch a package of rows with each column stored in a vector
+   */
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector.noNulls;
+    outputColVector.isRepeating = inputColVector.isRepeating;
+    int n = batch.size;
+    <OperandType2>[] vector = inputColVector.vector;
+    <ReturnType>[] outputVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector.isRepeating) {
+      outputVector[0] = <TypeConversion2>(value, <OperatorSymbol> (int) vector[0]);
+
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(value, <OperatorSymbol> (int) vector[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(value, <OperatorSymbol> (int) vector[i]);
+        }
+      }
+    } else { /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputVector[i] = <TypeConversion2>(value, <OperatorSymbol> (int) vector[i]);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputVector[i] = <TypeConversion2>(value, <OperatorSymbol> (int) vector[i]);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public <OperandType1> getValue() {
+    return value;
+  }
+
+  public void setValue(<OperandType1> value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
  }
+}
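Note the design choice in this scalar-on-the-left variant: the scalar conversion is applied once in the constructor, so the per-row loop only pays for the interval-side cast. A hypothetical expansion of the repeating-input branch, assuming a timestamp scalar plus a year-month interval column with a DateTimeMath helper like addMonthsToNanosUtc:

    // Hypothetical expansion sketch; the scalar operand was already normalized
    // at construction time, so only the month count is handled per row.
    outputVector[0] = dtm.addMonthsToNanosUtc(value, + (int) vector[0]);
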
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
new file mode 100644
index 0000000..55193ac
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIColumnCompareScalar.txt
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterDTIColumnCompareScalar.txt, which covers comparison
+ * expressions between a datetime/interval column and a scalar of the same type;
+ * however, output is not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(int colNum, long value) {
+    super(colNum, value);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt
new file mode 100644
index 0000000..f9fb12e
--- /dev/null
+++ b/ql/src/gen/vectorization/ExpressionTemplates/FilterDTIScalarCompareColumn.txt
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template FilterDTIScalarCompareColumn.txt, which covers comparison
+ * expressions between a datetime/interval scalar and a column of the same type;
+ * however, output is not produced in a separate column.
+ * The selected vector of the input {@link VectorizedRowBatch} is updated for in-place filtering.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(long value, int colNum) {
+    super(value, colNum);
+  }
+
+  public <ClassName>() {
+    super();
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType2>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
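The two Filter variants above reuse the projection comparison classes wholesale: the subclass drops the output column from the constructor and flips the descriptor to FILTER mode, because a filter rewrites the batch's selection in place instead of materializing a boolean column. A simplified sketch of that in-place pattern (assumed behavior of the inherited base class, not code from this patch):

    // Simplified sketch of in-place filtering: compact batch.selected down to the
    // rows that pass the predicate, then shrink the batch.
    int newSize = 0;
    for (int j = 0; j != n; j++) {
      int i = sel[j];
      if (vector[i] > value) {
        sel[newSize++] = i;
      }
    }
    batch.size = newSize;
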
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; +import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.util.DateTimeMath; + +/** + * Generated from template IntervalColumnArithmeticDateTimeColumnWithConvert.txt, which covers binary arithmetic + * expressions between columns. + */ +public class extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int colNum1; + private int colNum2; + private int outputColumn; + private DateTimeMath dtm = new DateTimeMath(); + + public (int colNum1, int colNum2, int outputColumn) { + this.colNum1 = colNum1; + this.colNum2 = colNum2; + this.outputColumn = outputColumn; + } + + public () { + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + inputColVector1 = () batch.cols[colNum1]; + inputColVector2 = () batch.cols[colNum2]; + outputColVector = () batch.cols[outputColumn]; + int[] sel = batch.selected; + int n = batch.size; + [] vector1 = inputColVector1.vector; + [] vector2 = inputColVector2.vector; + [] outputVector = outputColVector.vector; + + // arg1 is interval type, arg2 is datetime type + + // return immediately if batch is empty + if (n == 0) { + return; + } + + outputColVector.isRepeating = + inputColVector1.isRepeating && inputColVector2.isRepeating + || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0] + || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0]; + + // Handle nulls first + NullUtil.propagateNullsColCol( + inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse); + + /* Disregard nulls for processing. In other words, + * the arithmetic operation is performed even if one or + * more inputs are null. This is to improve speed by avoiding + * conditional checks in the inner loop. 
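+ * For example, if row 1 of the first operand is null, its data slot may hold
+ * garbage; the loops below still combine it with the second operand, but the
+ * null flag already set by propagateNullsColCol masks that result, and
+ * setNullDataEntries at the end overwrites it with a well-defined placeholder.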
+ */ + if (inputColVector1.isRepeating && inputColVector2.isRepeating) { + outputVector[0] = ((vector2[0]), (int) vector1[0]); + } else if (inputColVector1.isRepeating) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = ((vector2[0]), (int) vector1[i]); + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = ((vector2[0]), (int) vector1[i]); + } + } + } else if (inputColVector2.isRepeating) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = ((vector2[i]), (int) vector1[0]); + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = ((vector2[i]), (int) vector1[0]); + } + } + } else { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = ((vector2[i]), (int) vector1[i]); + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = ((vector2[i]), (int) vector1[i]); + } + } + } + + /* For the case when the output can have null values, follow + * the convention that the data values must be 1 for long and + * NaN for double. This is to prevent possible later zero-divide errors + * in complex arithmetic expressions like col2 / (col1 - 1) + * in the case when some col1 entries are null. + */ + NullUtil.setNullDataEntries(outputColVector, batch.selectedInUse, sel, n); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return ""; + } + + public int getColNum1() { + return colNum1; + } + + public void setColNum1(int colNum1) { + this.colNum1 = colNum1; + } + + public int getColNum2() { + return colNum2; + } + + public void setColNum2(int colNum2) { + this.colNum2 = colNum2; + } + + public void setOutputColumn(int outputColumn) { + this.outputColumn = outputColumn; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.getType(""), + VectorExpressionDescriptor.ArgumentType.getType("")) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} + diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt new file mode 100644 index 0000000..8fa3563 --- /dev/null +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalColumnArithmeticDateTimeScalarWithConvert.txt @@ -0,0 +1,154 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.ql.util.DateTimeMath; + +/** + * Generated from template IntervalColumnArithmeticDateTimeScalarWithConvert.txt, which covers binary arithmetic + * expressions between a column and a scalar. + */ +public class extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int colNum; + private value; + private int outputColumn; + private DateTimeMath dtm = new DateTimeMath(); + + public (int colNum, value, int outputColumn) { + this.colNum = colNum; + this.value = (value); + this.outputColumn = outputColumn; + } + + public () { + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + inputColVector = () batch.cols[colNum]; + outputColVector = () batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] inputIsNull = inputColVector.isNull; + boolean[] outputIsNull = outputColVector.isNull; + outputColVector.noNulls = inputColVector.noNulls; + outputColVector.isRepeating = inputColVector.isRepeating; + int n = batch.size; + [] vector = inputColVector.vector; + [] outputVector = outputColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + // arg1 is interval, arg2 is datetime + + if (inputColVector.isRepeating) { + outputVector[0] = (value, (int) vector[0]); + + // Even if there are no nulls, we always copy over entry 0. Simplifies code. 
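+    // When isRepeating is true, downstream operators read only entry 0 of the
+    // column, so computing that single value (and its null flag, below) stands
+    // in for the whole batch.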
+ outputIsNull[0] = inputIsNull[0]; + } else if (inputColVector.noNulls) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = (value, (int) vector[i]); + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = (value, (int) vector[i]); + } + } + } else /* there are nulls */ { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = (value, (int) vector[i]); + outputIsNull[i] = inputIsNull[i]; + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = (value, (int) vector[i]); + } + System.arraycopy(inputIsNull, 0, outputIsNull, 0, n); + } + } + + NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return ""; + } + + public int getColNum() { + return colNum; + } + + public void setColNum(int colNum) { + this.colNum = colNum; + } + + public getValue() { + return value; + } + + public void setValue( value) { + this.value = value; + } + + public void setOutputColumn(int outputColumn) { + this.outputColumn = outputColumn; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.getType(""), + VectorExpressionDescriptor.ArgumentType.getType("")) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git a/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt new file mode 100644 index 0000000..0464a5e --- /dev/null +++ b/ql/src/gen/vectorization/ExpressionTemplates/IntervalScalarArithmeticDateTimeColumnWithConvert.txt @@ -0,0 +1,167 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.*; + + +/* + * Because of the templatized nature of the code, either or both + * of these ColumnVector imports may be needed. 
Listing both of them + * rather than using ....vectorization.*; + */ +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; +import org.apache.hadoop.hive.ql.util.DateTimeMath; + +/** + * Generated from template IntervalScalarArithmeticDateTimeColumnWithConvert.txt. + * Implements a vectorized arithmetic operator with a scalar on the left and a + * column vector on the right. The result is output to an output column vector. + */ +public class extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int colNum; + private value; + private int outputColumn; + private DateTimeMath dtm = new DateTimeMath(); + + public ( value, int colNum, int outputColumn) { + this.colNum = colNum; + this.value = value; + this.outputColumn = outputColumn; + } + + public () { + } + + @Override + /** + * Method to evaluate scalar-column operation in vectorized fashion. + * + * @batch a package of rows with each column stored in a vector + */ + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + inputColVector = () batch.cols[colNum]; + outputColVector = () batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] inputIsNull = inputColVector.isNull; + boolean[] outputIsNull = outputColVector.isNull; + outputColVector.noNulls = inputColVector.noNulls; + outputColVector.isRepeating = inputColVector.isRepeating; + int n = batch.size; + [] vector = inputColVector.vector; + [] outputVector = outputColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + // arg1 is interval, arg2 is datetime + + if (inputColVector.isRepeating) { + outputVector[0] = ((vector[0]), (int) value); + + // Even if there are no nulls, we always copy over entry 0. Simplifies code. 
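+    // The scalar operand is assumed non-null at this point (null literals are
+    // folded during planning), so the output null pattern is exactly the input
+    // column's; that is why noNulls and isRepeating were copied wholesale above.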
+ outputIsNull[0] = inputIsNull[0]; + } else if (inputColVector.noNulls) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = ((vector[i]), (int) value); + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = ((vector[i]), (int) value); + } + } + } else { /* there are nulls */ + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = ((vector[i]), (int) value); + outputIsNull[i] = inputIsNull[i]; + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = ((vector[i]), (int) value); + } + System.arraycopy(inputIsNull, 0, outputIsNull, 0, n); + } + } + + NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return ""; + } + + public int getColNum() { + return colNum; + } + + public void setColNum(int colNum) { + this.colNum = colNum; + } + + public getValue() { + return value; + } + + public void setValue( value) { + this.value = value; + } + + public void setOutputColumn(int outputColumn) { + this.outputColumn = outputColumn; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.getType(""), + VectorExpressionDescriptor.ArgumentType.getType("")) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git a/ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnWithConvert.txt b/ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnWithConvert.txt new file mode 100644 index 0000000..91887c8 --- /dev/null +++ b/ql/src/gen/vectorization/ExpressionTemplates/ScalarArithmeticColumnWithConvert.txt @@ -0,0 +1,163 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.*; + + +/* + * Because of the templatized nature of the code, either or both + * of these ColumnVector imports may be needed. 
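+ * (For the date, timestamp, and interval expansions in this patch every operand
+ * is encoded as a long, so only the LongColumnVector import is exercised here.)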
Listing both of them + * rather than using ....vectorization.*; + */ +import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil; + +/** + * Generated from template ScalarArithmeticColumnWithConvert.txt. + * Implements a vectorized arithmetic operator with a scalar on the left and a + * column vector on the right. The result is output to an output column vector. + */ +public class extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int colNum; + private value; + private int outputColumn; + + public ( value, int colNum, int outputColumn) { + this.colNum = colNum; + this.value = (value); + this.outputColumn = outputColumn; + } + + public () { + } + + @Override + /** + * Method to evaluate scalar-column operation in vectorized fashion. + * + * @batch a package of rows with each column stored in a vector + */ + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + inputColVector = () batch.cols[colNum]; + outputColVector = () batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] inputIsNull = inputColVector.isNull; + boolean[] outputIsNull = outputColVector.isNull; + outputColVector.noNulls = inputColVector.noNulls; + outputColVector.isRepeating = inputColVector.isRepeating; + int n = batch.size; + [] vector = inputColVector.vector; + [] outputVector = outputColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + if (inputColVector.isRepeating) { + outputVector[0] = value (vector[0]); + + // Even if there are no nulls, we always copy over entry 0. Simplifies code. 
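+    // For the date operands of this template, the expansion applies
+    // TimestampUtils.daysToNanoseconds (added below) to each value. As a worked
+    // example: day 16071 since the epoch (2014-01-01) converts to
+    // 16071 * 86,400,000 ms * 1,000,000 = 1,388,534,400,000,000,000 ns, modulo
+    // the local-timezone adjustment that DateWritable.daysToMillis applies.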
+ outputIsNull[0] = inputIsNull[0]; + } else if (inputColVector.noNulls) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = value (vector[i]); + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = value (vector[i]); + } + } + } else { /* there are nulls */ + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outputVector[i] = value (vector[i]); + outputIsNull[i] = inputIsNull[i]; + } + } else { + for(int i = 0; i != n; i++) { + outputVector[i] = value (vector[i]); + } + System.arraycopy(inputIsNull, 0, outputIsNull, 0, n); + } + } + + NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return ""; + } + + public int getColNum() { + return colNum; + } + + public void setColNum(int colNum) { + this.colNum = colNum; + } + + public getValue() { + return value; + } + + public void setValue( value) { + this.value = value; + } + + public void setOutputColumn(int outputColumn) { + this.outputColumn = outputColumn; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.getType(""), + VectorExpressionDescriptor.ArgumentType.getType("")) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java index 352e43e..95dbf8d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampUtils.java @@ -20,6 +20,8 @@ import java.sql.Timestamp; +import org.apache.hadoop.hive.serde2.io.DateWritable; + public final class TimestampUtils { /** @@ -58,4 +60,8 @@ public static long secondsToNanoseconds(long seconds) { public static long doubleToNanoseconds(double d) { return (long) (d * 1000000000); } + + public static long daysToNanoseconds(long daysSinceEpoch) { + return DateWritable.daysToMillis((int) daysSinceEpoch) * 1000000; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java index c915f72..681f803 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java @@ -32,6 +32,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -47,6 +49,7 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; +import org.apache.hive.common.util.DateUtils; /** * This class is used as a 
static factory for VectorColumnAssign. @@ -338,6 +341,35 @@ public void assignObjectValue(Object val, int destIndex) throws HiveException { } }.init(outputBatch, (LongColumnVector) destCol); break; + case INTERVAL_YEAR_MONTH: + outVCA = new VectorLongColumnAssign() { + @Override + public void assignObjectValue(Object val, int destIndex) throws HiveException { + if (val == null) { + assignNull(destIndex); + } + else { + HiveIntervalYearMonthWritable bw = (HiveIntervalYearMonthWritable) val; + assignLong(bw.getHiveIntervalYearMonth().getTotalMonths(), destIndex); + } + } + }.init(outputBatch, (LongColumnVector) destCol); + break; + case INTERVAL_DAY_TIME:outVCA = new VectorLongColumnAssign() { + @Override + public void assignObjectValue(Object val, int destIndex) throws HiveException { + if (val == null) { + assignNull(destIndex); + } + else { + HiveIntervalDayTimeWritable bw = (HiveIntervalDayTimeWritable) val; + assignLong( + DateUtils.getIntervalDayTimeTotalNanos(bw.getHiveIntervalDayTime()), + destIndex); + } + } + }.init(outputBatch, (LongColumnVector) destCol); + break; default: throw new HiveException("Incompatible Long vector column and primitive category " + category); @@ -535,6 +567,10 @@ public void assignObjectValue(Object val, int destIndex) throws HiveException { vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BINARY); } else if (writables[i] instanceof TimestampWritable) { vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.TIMESTAMP); + } else if (writables[i] instanceof HiveIntervalYearMonthWritable) { + vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INTERVAL_YEAR_MONTH); + } else if (writables[i] instanceof HiveIntervalDayTimeWritable) { + vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.INTERVAL_DAY_TIME); } else if (writables[i] instanceof BooleanWritable) { vcas[i] = buildObjectAssign(outputBatch, i, PrimitiveCategory.BOOLEAN); } else { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java index bb18b32..9d241bd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java @@ -22,6 +22,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hive.common.util.AnnotationUtils; /** @@ -66,9 +67,13 @@ STRING_FAMILY (STRING.value | CHAR.value | VARCHAR.value), DATE (0x040), TIMESTAMP (0x080), + INTERVAL_YEAR_MONTH (0x100), + INTERVAL_DAY_TIME (0x200), DATETIME_FAMILY (DATE.value | TIMESTAMP.value), + INTERVAL_FAMILY (INTERVAL_YEAR_MONTH.value | INTERVAL_DAY_TIME.value), INT_TIMESTAMP_FAMILY (INT_FAMILY.value | TIMESTAMP.value), - INT_DATETIME_FAMILY (INT_FAMILY.value | DATETIME_FAMILY.value), + INT_INTERVAL_FAMILY (INT_FAMILY.value | INTERVAL_FAMILY.value), + INT_DATETIME_INTERVAL_FAMILY (INT_FAMILY.value | DATETIME_FAMILY.value | INTERVAL_FAMILY.value), STRING_DATETIME_FAMILY (STRING_FAMILY.value | DATETIME_FAMILY.value), ALL_FAMILY (0xFFF); @@ -105,6 +110,10 @@ public static ArgumentType fromHiveTypeName(String hiveTypeName) { return TIMESTAMP; } else if (lower.equals("date")) { return DATE; + } else if (lower.equals(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME)) { + return 
INTERVAL_YEAR_MONTH; + } else if (lower.equals(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME)) { + return INTERVAL_DAY_TIME; } else if (lower.equals("void")) { // The old code let void through... return INT_FAMILY; @@ -137,7 +146,9 @@ public boolean isSameTypeOrFamily(ArgumentType other) { public static String getVectorColumnSimpleName(ArgumentType argType) { if (argType == INT_FAMILY || argType == DATE || - argType == TIMESTAMP) { + argType == TIMESTAMP || + argType == INTERVAL_YEAR_MONTH || + argType == INTERVAL_DAY_TIME) { return "Long"; } else if (argType == FLOAT_FAMILY) { return "Double"; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index 5201c57..14a1059 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -38,6 +38,8 @@ import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; @@ -113,6 +115,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.util.StringUtils; +import org.apache.hive.common.util.DateUtils; /** * Context class for vectorization execution. @@ -253,6 +256,8 @@ public void addProjectionColumn(String columnName, int vectorBatchColIndex) { castExpressionUdfs.add(GenericUDFToChar.class); castExpressionUdfs.add(GenericUDFToVarchar.class); castExpressionUdfs.add(GenericUDFTimestamp.class); + castExpressionUdfs.add(GenericUDFToIntervalYearMonth.class); + castExpressionUdfs.add(GenericUDFToIntervalDayTime.class); castExpressionUdfs.add(UDFToByte.class); castExpressionUdfs.add(UDFToBoolean.class); castExpressionUdfs.add(UDFToDouble.class); @@ -658,6 +663,12 @@ private GenericUDF getGenericUDFForCast(TypeInfo castType) throws HiveException case TIMESTAMP: genericUdf = new GenericUDFToUnixTimeStamp(); break; + case INTERVAL_YEAR_MONTH: + genericUdf = new GenericUDFToIntervalYearMonth(); + break; + case INTERVAL_DAY_TIME: + genericUdf = new GenericUDFToIntervalDayTime(); + break; case BINARY: genericUdf = new GenericUDFToBinary(); break; @@ -871,8 +882,16 @@ private VectorExpression getConstantVectorExpression(Object constantValue, TypeI switch (vectorArgType) { case INT_FAMILY: return new ConstantVectorExpression(outCol, ((Number) constantValue).longValue()); + case DATE: + return new ConstantVectorExpression(outCol, DateWritable.dateToDays((Date) constantValue)); case TIMESTAMP: return new ConstantVectorExpression(outCol, TimestampUtils.getTimeNanoSec((Timestamp) constantValue)); + case INTERVAL_YEAR_MONTH: + return new ConstantVectorExpression(outCol, + ((HiveIntervalYearMonth) constantValue).getTotalMonths()); + case INTERVAL_DAY_TIME: + return new ConstantVectorExpression(outCol, + DateUtils.getIntervalDayTimeTotalNanos((HiveIntervalDayTime) constantValue)); case FLOAT_FAMILY: return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue()); case DECIMAL: @@ -1773,6 +1792,14 @@ public static boolean 
isDateFamily(String resultType) { return resultType.equalsIgnoreCase("date"); } + public static boolean isIntervalYearMonthFamily(String resultType) { + return resultType.equalsIgnoreCase("interval_year_month"); + } + + public static boolean isIntervalDayTimeFamily(String resultType) { + return resultType.equalsIgnoreCase("interval_day_time"); + } + // return true if this is any kind of float public static boolean isFloatFamily(String resultType) { return resultType.equalsIgnoreCase("double") @@ -1843,12 +1870,19 @@ private double getNumericScalarAsDouble(ExprNodeDesc constDesc) private Object getVectorTypeScalarValue(ExprNodeConstantDesc constDesc) throws HiveException { String t = constDesc.getTypeInfo().getTypeName(); - if (isTimestampFamily(t)) { - return TimestampUtils.getTimeNanoSec((Timestamp) getScalarValue(constDesc)); - } else if (isDateFamily(t)) { - return DateWritable.dateToDays((Date) getScalarValue(constDesc)); - } else { - return getScalarValue(constDesc); + VectorExpression.Type type = VectorExpression.Type.getValue(t); + Object scalarValue = getScalarValue(constDesc); + switch (type) { + case TIMESTAMP: + return TimestampUtils.getTimeNanoSec((Timestamp) scalarValue); + case DATE: + return DateWritable.dateToDays((Date) scalarValue); + case INTERVAL_YEAR_MONTH: + return ((HiveIntervalYearMonth) scalarValue).getTotalMonths(); + case INTERVAL_DAY_TIME: + return DateUtils.getIntervalDayTimeTotalNanos((HiveIntervalDayTime) scalarValue); + default: + return scalarValue; } } @@ -1935,6 +1969,9 @@ static String getNormalizedName(String hiveTypeName) { return "Date"; case TIMESTAMP: return "Timestamp"; + case INTERVAL_YEAR_MONTH: + case INTERVAL_DAY_TIME: + return hiveTypeName; default: return "None"; } @@ -1959,6 +1996,9 @@ static String getUndecoratedName(String hiveTypeName) { return "Date"; case TIMESTAMP: return "Timestamp"; + case INTERVAL_YEAR_MONTH: + case INTERVAL_DAY_TIME: + return hiveTypeName; default: return "None"; } @@ -1969,16 +2009,16 @@ static String getUndecoratedName(String hiveTypeName) { // TODO: And, investigate if different reduce-side versions are needed for var* and std*, or if map-side aggregate can be used.. Right now they are conservatively // marked map-side (HASH). 
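The family constants added to ArgumentType above are bit masks, which is what lets one AggregateDefinition entry in the list below cover several concrete column types at once. A minimal standalone sketch of the matching scheme (the bit values are copied from this patch; the class and helper names are illustrative, and the helper only approximates ArgumentType.isSameTypeOrFamily):

public class ArgumentTypeMaskDemo {
  // Bit values as defined in VectorExpressionDescriptor.ArgumentType by this patch.
  static final int DATE = 0x040;
  static final int TIMESTAMP = 0x080;
  static final int INTERVAL_YEAR_MONTH = 0x100;
  static final int INTERVAL_DAY_TIME = 0x200;

  // Families are simply unions of the member bits.
  static final int DATETIME_FAMILY = DATE | TIMESTAMP;                        // 0x0C0
  static final int INTERVAL_FAMILY = INTERVAL_YEAR_MONTH | INTERVAL_DAY_TIME; // 0x300

  // A type belongs to a family when the two masks share at least one bit.
  static boolean sameTypeOrFamily(int a, int b) {
    return (a & b) != 0;
  }

  public static void main(String[] args) {
    System.out.println(sameTypeOrFamily(INTERVAL_DAY_TIME, INTERVAL_FAMILY)); // true
    System.out.println(sameTypeOrFamily(DATE, INTERVAL_FAMILY));              // false
  }
}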
static ArrayList aggregatesDefinition = new ArrayList() {{ - add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.INT_DATETIME_FAMILY, null, VectorUDAFMinLong.class)); + add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY, null, VectorUDAFMinLong.class)); add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, null, VectorUDAFMinDouble.class)); add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, null, VectorUDAFMinString.class)); add(new AggregateDefinition("min", VectorExpressionDescriptor.ArgumentType.DECIMAL, null, VectorUDAFMinDecimal.class)); - add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.INT_DATETIME_FAMILY, null, VectorUDAFMaxLong.class)); + add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY, null, VectorUDAFMaxLong.class)); add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, null, VectorUDAFMaxDouble.class)); add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, null, VectorUDAFMaxString.class)); add(new AggregateDefinition("max", VectorExpressionDescriptor.ArgumentType.DECIMAL, null, VectorUDAFMaxDecimal.class)); add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.NONE, GroupByDesc.Mode.HASH, VectorUDAFCountStar.class)); - add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_DATETIME_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class)); + add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class)); add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.INT_FAMILY, GroupByDesc.Mode.MERGEPARTIAL, VectorUDAFCountMerge.class)); add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class)); add(new AggregateDefinition("count", VectorExpressionDescriptor.ArgumentType.STRING_FAMILY, GroupByDesc.Mode.HASH, VectorUDAFCount.class)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java index e304cf8..506284a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java @@ -26,6 +26,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.common.type.HiveChar; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -34,6 +36,8 @@ import org.apache.hadoop.hive.serde2.io.DoubleWritable; import org.apache.hadoop.hive.serde2.io.HiveCharWritable; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; +import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.io.ShortWritable; import org.apache.hadoop.hive.serde2.io.TimestampWritable; @@ -50,6 
+54,7 @@ import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; +import org.apache.hive.common.util.DateUtils; public class VectorizedBatchUtil { private static final Log LOG = LogFactory.getLog(VectorizedBatchUtil.class); @@ -126,6 +131,8 @@ private static void allocateColumnVector(StructObjectInspector oi, case LONG: case TIMESTAMP: case DATE: + case INTERVAL_YEAR_MONTH: + case INTERVAL_DAY_TIME: cvList.add(new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE)); break; case FLOAT: @@ -394,6 +401,30 @@ private static void setVector(Object row, } } break; + case INTERVAL_YEAR_MONTH: { + LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex]; + if (writableCol != null) { + HiveIntervalYearMonth i = ((HiveIntervalYearMonthWritable) writableCol).getHiveIntervalYearMonth(); + lcv.vector[rowIndex] = i.getTotalMonths(); + lcv.isNull[rowIndex] = false; + } else { + lcv.vector[rowIndex] = 1; + setNullColIsNullValue(lcv, rowIndex); + } + } + break; + case INTERVAL_DAY_TIME: { + LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex]; + if (writableCol != null) { + HiveIntervalDayTime i = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime(); + lcv.vector[rowIndex] = DateUtils.getIntervalDayTimeTotalNanos(i); + lcv.isNull[rowIndex] = false; + } else { + lcv.vector[rowIndex] = 1; + setNullColIsNullValue(lcv, rowIndex); + } + } + break; case BINARY: { BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex]; if (writableCol != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java index 88ec2b2..b48c2ca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java @@ -35,6 +35,8 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; @@ -42,6 +44,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.PartitionDesc; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; @@ -61,6 +64,7 @@ import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapred.FileSplit; +import org.apache.hive.common.util.DateUtils; /** * Context for Vectorized row batch. 
this class does eager deserialization of row data using serde
@@ -301,6 +305,8 @@ public VectorizedRowBatch createVectorizedRowBatch() throws HiveException
       case LONG:
       case TIMESTAMP:
       case DATE:
+      case INTERVAL_YEAR_MONTH:
+      case INTERVAL_DAY_TIME:
         result.cols[j] = new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
         break;
       case FLOAT:
@@ -503,7 +509,33 @@ public void addPartitionColsToBatch(VectorizedRowBatch batch) throws HiveExcepti
          }
        }
        break;
-
+
+        case INTERVAL_YEAR_MONTH: {
+          LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+          if (value == null) {
+            lcv.noNulls = false;
+            lcv.isNull[0] = true;
+            lcv.isRepeating = true;
+          } else {
+            lcv.fill(((HiveIntervalYearMonth) value).getTotalMonths());
+            lcv.isNull[0] = false;
+          }
+        }
+        break;
+
+        case INTERVAL_DAY_TIME: {
+          LongColumnVector lcv = (LongColumnVector) batch.cols[colIndex];
+          if (value == null) {
+            lcv.noNulls = false;
+            lcv.isNull[0] = true;
+            lcv.isRepeating = true;
+          } else {
+            lcv.fill(DateUtils.getIntervalDayTimeTotalNanos((HiveIntervalDayTime) value));
+            lcv.isNull[0] = false;
+          }
+        }
+        break;
+
        case FLOAT: {
          DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[colIndex];
          if (value == null) {
@@ -637,7 +667,9 @@ private ColumnVector allocateColumnVector(String type, int defaultSize) {
       return new DecimalColumnVector(defaultSize, precisionScale[0], precisionScale[1]);
     } else if (type.equalsIgnoreCase("long") ||
         type.equalsIgnoreCase("date") ||
-        type.equalsIgnoreCase("timestamp")) {
+        type.equalsIgnoreCase("timestamp") ||
+        type.equalsIgnoreCase(serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME) ||
+        type.equalsIgnoreCase(serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME)) {
       return new LongColumnVector(defaultSize);
     } else {
       throw new Error("Cannot allocate vector column for " + type);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
new file mode 100644
index 0000000..518d5d5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hive.common.util.DateUtils;
+
+
+/**
+ * Casts a string vector to an interval day-time vector.
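+ * For example, HiveIntervalDayTime.valueOf("2 04:30:20.5") parses to 2 days,
+ * 4 hours, 30 minutes, and 20.5 seconds, which this expression stores in the
+ * output LongColumnVector as ((2*24 + 4)*3600 + 30*60 + 20) * 1000000000L
+ * + 500000000L = 189,020,500,000,000 nanoseconds.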
+ */ +public class CastStringToIntervalDayTime extends VectorExpression { + private static final long serialVersionUID = 1L; + + private int inputColumn; + private int outputColumn; + + public CastStringToIntervalDayTime() { + + } + + public CastStringToIntervalDayTime(int inputColumn, int outputColumn) { + this.inputColumn = inputColumn; + this.outputColumn = outputColumn; + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + BytesColumnVector inV = (BytesColumnVector) batch.cols[inputColumn]; + int[] sel = batch.selected; + int n = batch.size; + LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn]; + + if (n == 0) { + + // Nothing to do + return; + } + + if (inV.noNulls) { + outV.noNulls = true; + if (inV.isRepeating) { + outV.isRepeating = true; + evaluate(outV, inV, 0); + } else if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + evaluate(outV, inV, i); + } + outV.isRepeating = false; + } else { + for(int i = 0; i != n; i++) { + evaluate(outV, inV, i); + } + outV.isRepeating = false; + } + } else { + + // Handle case with nulls. Don't do function if the value is null, + // because the data may be undefined for a null value. + outV.noNulls = false; + if (inV.isRepeating) { + outV.isRepeating = true; + outV.isNull[0] = inV.isNull[0]; + if (!inV.isNull[0]) { + evaluate(outV, inV, 0); + } + } else if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outV.isNull[i] = inV.isNull[i]; + if (!inV.isNull[i]) { + evaluate(outV, inV, i); + } + } + outV.isRepeating = false; + } else { + System.arraycopy(inV.isNull, 0, outV.isNull, 0, n); + for(int i = 0; i != n; i++) { + if (!inV.isNull[i]) { + evaluate(outV, inV, i); + } + } + outV.isRepeating = false; + } + } + } + + private void evaluate(LongColumnVector outV, BytesColumnVector inV, int i) { + try { + HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf( + new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8")); + outV.vector[i] = DateUtils.getIntervalDayTimeTotalNanos(interval); + } catch (Exception e) { + outV.vector[i] = 1; + outV.isNull[i] = true; + outV.noNulls = false; + } + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + public void setOutputColumn(int outputColumn) { + this.outputColumn = outputColumn; + } + + public int getInputColumn() { + return inputColumn; + } + + public void setInputColumn(int inputColumn) { + this.inputColumn = inputColumn; + } + + @Override + public String getOutputType() { + return serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); + b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(1) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.STRING_FAMILY) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN); + return b.build(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java new file mode 100644 index 0000000..62f3dc9 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalYearMonth.java @@ -0,0 +1,159 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or 
more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.serde.serdeConstants; + + +/** + * Casts a string vector to a interval year-month vector. + */ +public class CastStringToIntervalYearMonth extends VectorExpression { + private static final long serialVersionUID = 1L; + + private int inputColumn; + private int outputColumn; + + public CastStringToIntervalYearMonth() { + + } + + public CastStringToIntervalYearMonth(int inputColumn, int outputColumn) { + this.inputColumn = inputColumn; + this.outputColumn = outputColumn; + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + BytesColumnVector inV = (BytesColumnVector) batch.cols[inputColumn]; + int[] sel = batch.selected; + int n = batch.size; + LongColumnVector outV = (LongColumnVector) batch.cols[outputColumn]; + + if (n == 0) { + + // Nothing to do + return; + } + + if (inV.noNulls) { + outV.noNulls = true; + if (inV.isRepeating) { + outV.isRepeating = true; + evaluate(outV, inV, 0); + } else if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + evaluate(outV, inV, i); + } + outV.isRepeating = false; + } else { + for(int i = 0; i != n; i++) { + evaluate(outV, inV, i); + } + outV.isRepeating = false; + } + } else { + + // Handle case with nulls. Don't do function if the value is null, + // because the data may be undefined for a null value. 
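+        // In particular, a null slot's bytes, start, and length entries may hold
+        // stale values from an earlier batch, so parsing them would yield an
+        // arbitrary interval (or an exception) rather than a meaningful result.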
+ outV.noNulls = false; + if (inV.isRepeating) { + outV.isRepeating = true; + outV.isNull[0] = inV.isNull[0]; + if (!inV.isNull[0]) { + evaluate(outV, inV, 0); + } + } else if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outV.isNull[i] = inV.isNull[i]; + if (!inV.isNull[i]) { + evaluate(outV, inV, i); + } + } + outV.isRepeating = false; + } else { + System.arraycopy(inV.isNull, 0, outV.isNull, 0, n); + for(int i = 0; i != n; i++) { + if (!inV.isNull[i]) { + evaluate(outV, inV, i); + } + } + outV.isRepeating = false; + } + } + } + + private void evaluate(LongColumnVector outV, BytesColumnVector inV, int i) { + try { + HiveIntervalYearMonth interval = HiveIntervalYearMonth.valueOf( + new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8")); + outV.vector[i] = interval.getTotalMonths(); + } catch (Exception e) { + outV.vector[i] = 1; + outV.isNull[i] = true; + outV.noNulls = false; + } + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + public void setOutputColumn(int outputColumn) { + this.outputColumn = outputColumn; + } + + public int getInputColumn() { + return inputColumn; + } + + public void setInputColumn(int inputColumn) { + this.inputColumn = inputColumn; + } + + @Override + public String getOutputType() { + return serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + VectorExpressionDescriptor.Builder b = new VectorExpressionDescriptor.Builder(); + b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(1) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.STRING_FAMILY) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN); + return b.build(); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java index d7ace6d..c0e4cf0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpression.java @@ -30,7 +30,8 @@ */ public abstract class VectorExpression implements Serializable { public enum Type { - STRING, CHAR, VARCHAR, TIMESTAMP, DATE, LONG, DOUBLE, DECIMAL, OTHER; + STRING, CHAR, VARCHAR, TIMESTAMP, DATE, LONG, DOUBLE, DECIMAL, + INTERVAL_YEAR_MONTH, INTERVAL_DAY_TIME, OTHER; private static Map types = ImmutableMap.builder() .put("string", STRING) .put("char", CHAR) @@ -40,6 +41,8 @@ .put("long", LONG) .put("double", DOUBLE) .put("decimal", DECIMAL) + .put("interval_year_month", INTERVAL_YEAR_MONTH) + .put("interval_day_time", INTERVAL_DAY_TIME) .build(); public static Type getValue(String name) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java index 94a47e0..b2798d2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java @@ -28,6 +28,8 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; import 
org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.vector.*; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -47,6 +49,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalDayTimeObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveIntervalYearMonthObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableLongObjectInspector; @@ -56,6 +60,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.Text; +import org.apache.hive.common.util.DateUtils; /** * VectorExpressionWritableFactory helper class for generating VectorExpressionWritable objects. @@ -430,6 +435,12 @@ public static VectorExpressionWriter genVectorExpressionWritable( case DATE: return genVectorExpressionWritableDate( (SettableDateObjectInspector) fieldObjInspector); + case INTERVAL_YEAR_MONTH: + return genVectorExpressionWritableIntervalYearMonth( + (SettableHiveIntervalYearMonthObjectInspector) fieldObjInspector); + case INTERVAL_DAY_TIME: + return genVectorExpressionWritableIntervalDayTime( + (SettableHiveIntervalDayTimeObjectInspector) fieldObjInspector); case DECIMAL: return genVectorExpressionWritableDecimal( (SettableHiveDecimalObjectInspector) fieldObjInspector); @@ -587,6 +598,84 @@ public Object initValue(Object ignored) { }.init(fieldObjInspector); } + private static VectorExpressionWriter genVectorExpressionWritableIntervalYearMonth( + SettableHiveIntervalYearMonthObjectInspector fieldObjInspector) throws HiveException { + return new VectorExpressionWriterLong() { + private Object obj; + private HiveIntervalYearMonth interval; + + public VectorExpressionWriter init(SettableHiveIntervalYearMonthObjectInspector objInspector) + throws HiveException { + super.init(objInspector); + interval = new HiveIntervalYearMonth(); + obj = initValue(null); + return this; + } + + @Override + public Object writeValue(long value) { + interval.set((int) value); + ((SettableHiveIntervalYearMonthObjectInspector) this.objectInspector).set(obj, interval); + return obj; + } + + @Override + public Object setValue(Object field, long value) { + if (null == field) { + field = initValue(null); + } + interval.set((int) value); + ((SettableHiveIntervalYearMonthObjectInspector) this.objectInspector).set(field, interval); + return field; + } + + @Override + public Object initValue(Object ignored) { + return ((SettableHiveIntervalYearMonthObjectInspector) this.objectInspector) + .create(new HiveIntervalYearMonth()); + } + }.init(fieldObjInspector); + } + + private static VectorExpressionWriter genVectorExpressionWritableIntervalDayTime( + SettableHiveIntervalDayTimeObjectInspector fieldObjInspector) throws HiveException { + return new VectorExpressionWriterLong() { + private Object obj; + private HiveIntervalDayTime interval; + + public VectorExpressionWriter init(SettableHiveIntervalDayTimeObjectInspector 
objInspector) + throws HiveException { + super.init(objInspector); + interval = new HiveIntervalDayTime(); + obj = initValue(null); + return this; + } + + @Override + public Object writeValue(long value) { + DateUtils.setIntervalDayTimeTotalNanos(interval, value); + ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(obj, interval); + return obj; + } + + @Override + public Object setValue(Object field, long value) { + if (null == field) { + field = initValue(null); + } + DateUtils.setIntervalDayTimeTotalNanos(interval, value); + ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector).set(field, interval); + return field; + } + + @Override + public Object initValue(Object ignored) { + return ((SettableHiveIntervalDayTimeObjectInspector) this.objectInspector) + .create(new HiveIntervalDayTime()); + } + }.init(fieldObjInspector); + } + private static VectorExpressionWriter genVectorExpressionWritableChar( SettableHiveCharObjectInspector fieldObjInspector) throws HiveException { return new VectorExpressionWriterBytes() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java index 10bf2bd..8c8e288 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/ASTBuilder.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.ParseDriver; -import org.apache.hive.common.util.DateTimeMath; class ASTBuilder { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java index aca4273..44ab1bd 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java @@ -118,6 +118,7 @@ import org.apache.hadoop.hive.ql.udf.UDFWeekOfYear; import org.apache.hadoop.hive.ql.udf.UDFYear; import org.apache.hadoop.hive.ql.udf.generic.*; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; @@ -148,6 +149,8 @@ public Vectorizer() { patternBuilder.append("|long"); patternBuilder.append("|short"); patternBuilder.append("|timestamp"); + patternBuilder.append("|" + serdeConstants.INTERVAL_YEAR_MONTH_TYPE_NAME); + patternBuilder.append("|" + serdeConstants.INTERVAL_DAY_TIME_TYPE_NAME); patternBuilder.append("|boolean"); patternBuilder.append("|binary"); patternBuilder.append("|string"); @@ -261,6 +264,8 @@ public Vectorizer() { supportedGenericUDFs.add(GenericUDFToDate.class); supportedGenericUDFs.add(GenericUDFToChar.class); supportedGenericUDFs.add(GenericUDFToVarchar.class); + supportedGenericUDFs.add(GenericUDFToIntervalYearMonth.class); + supportedGenericUDFs.add(GenericUDFToIntervalDayTime.class); // For conditional expressions supportedGenericUDFs.add(GenericUDFIf.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java index a32c133..116b3c4 100644 --- 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIMinus.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; @@ -44,7 +45,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.apache.hive.common.util.DateTimeMath; @Description(name = "-", value = "a _FUNC_ b - Returns the difference a-b") public class GenericUDFOPDTIMinus extends GenericUDFBaseDTI { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java index 9a5c3a9..6417f02 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDTIPlus.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.util.DateTimeMath; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable; import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable; @@ -44,7 +45,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.apache.hive.common.util.DateTimeMath; @Description(name = "+", value = "a _FUNC_ b - Returns a+b") public class GenericUDFOPDTIPlus extends GenericUDFBaseDTI { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java index 3870b51..428ced7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java @@ -55,7 +55,13 @@ TimestampColEqualLongScalar.class, LongScalarEqualTimestampColumn.class, FilterTimestampColEqualLongScalar.class, FilterLongScalarEqualTimestampColumn.class, TimestampColEqualDoubleScalar.class, DoubleScalarEqualTimestampColumn.class, - FilterTimestampColEqualDoubleScalar.class, FilterDoubleScalarEqualTimestampColumn.class + FilterTimestampColEqualDoubleScalar.class, FilterDoubleScalarEqualTimestampColumn.class, + IntervalYearMonthScalarEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarEqualIntervalYearMonthColumn.class, + IntervalYearMonthColEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColEqualIntervalYearMonthScalar.class, + IntervalDayTimeScalarEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarEqualIntervalDayTimeColumn.class, + IntervalDayTimeColEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColEqualIntervalDayTimeScalar.class, + 
DateColEqualDateScalar.class,FilterDateColEqualDateScalar.class, + DateScalarEqualDateColumn.class,FilterDateScalarEqualDateColumn.class, }) public class GenericUDFOPEqual extends GenericUDFBaseCompare { public GenericUDFOPEqual(){ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java index 65e1835..d9556cc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java @@ -56,7 +56,13 @@ TimestampColGreaterEqualLongScalar.class, LongScalarGreaterEqualTimestampColumn.class, FilterTimestampColGreaterEqualLongScalar.class, FilterLongScalarGreaterEqualTimestampColumn.class, TimestampColGreaterEqualDoubleScalar.class, DoubleScalarGreaterEqualTimestampColumn.class, - FilterTimestampColGreaterEqualDoubleScalar.class, FilterDoubleScalarGreaterEqualTimestampColumn.class + FilterTimestampColGreaterEqualDoubleScalar.class, FilterDoubleScalarGreaterEqualTimestampColumn.class, + IntervalYearMonthScalarGreaterEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarGreaterEqualIntervalYearMonthColumn.class, + IntervalYearMonthColGreaterEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColGreaterEqualIntervalYearMonthScalar.class, + IntervalDayTimeScalarGreaterEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarGreaterEqualIntervalDayTimeColumn.class, + IntervalDayTimeColGreaterEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColGreaterEqualIntervalDayTimeScalar.class, + DateColGreaterEqualDateScalar.class,FilterDateColGreaterEqualDateScalar.class, + DateScalarGreaterEqualDateColumn.class,FilterDateScalarGreaterEqualDateColumn.class, }) public class GenericUDFOPEqualOrGreaterThan extends GenericUDFBaseCompare { public GenericUDFOPEqualOrGreaterThan(){ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java index 3e4a1d2..1d9eaf6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java @@ -56,7 +56,13 @@ TimestampColLessEqualLongScalar.class, LongScalarLessEqualTimestampColumn.class, FilterTimestampColLessEqualLongScalar.class, FilterLongScalarLessEqualTimestampColumn.class, TimestampColLessEqualDoubleScalar.class, DoubleScalarLessEqualTimestampColumn.class, - FilterTimestampColLessEqualDoubleScalar.class, FilterDoubleScalarLessEqualTimestampColumn.class + FilterTimestampColLessEqualDoubleScalar.class, FilterDoubleScalarLessEqualTimestampColumn.class, + IntervalYearMonthScalarLessEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarLessEqualIntervalYearMonthColumn.class, + IntervalYearMonthColLessEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColLessEqualIntervalYearMonthScalar.class, + IntervalDayTimeScalarLessEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarLessEqualIntervalDayTimeColumn.class, + IntervalDayTimeColLessEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColLessEqualIntervalDayTimeScalar.class, + DateColLessEqualDateScalar.class,FilterDateColLessEqualDateScalar.class, + DateScalarLessEqualDateColumn.class,FilterDateScalarLessEqualDateColumn.class, }) public class GenericUDFOPEqualOrLessThan extends 
GenericUDFBaseCompare { public GenericUDFOPEqualOrLessThan(){ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java index df7a857..8e1f2b1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java @@ -56,7 +56,13 @@ TimestampColGreaterLongScalar.class, LongScalarGreaterTimestampColumn.class, FilterTimestampColGreaterLongScalar.class, FilterLongScalarGreaterTimestampColumn.class, TimestampColGreaterDoubleScalar.class, DoubleScalarGreaterTimestampColumn.class, - FilterTimestampColGreaterDoubleScalar.class, FilterDoubleScalarGreaterTimestampColumn.class + FilterTimestampColGreaterDoubleScalar.class, FilterDoubleScalarGreaterTimestampColumn.class, + IntervalYearMonthScalarGreaterIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarGreaterIntervalYearMonthColumn.class, + IntervalYearMonthColGreaterIntervalYearMonthScalar.class, FilterIntervalYearMonthColGreaterIntervalYearMonthScalar.class, + IntervalDayTimeScalarGreaterIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarGreaterIntervalDayTimeColumn.class, + IntervalDayTimeColGreaterIntervalDayTimeScalar.class, FilterIntervalDayTimeColGreaterIntervalDayTimeScalar.class, + DateColGreaterDateScalar.class,FilterDateColGreaterDateScalar.class, + DateScalarGreaterDateColumn.class,FilterDateScalarGreaterDateColumn.class, }) public class GenericUDFOPGreaterThan extends GenericUDFBaseCompare { public GenericUDFOPGreaterThan(){ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java index fafd99b..101b348 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java @@ -56,7 +56,13 @@ TimestampColLessLongScalar.class, LongScalarLessTimestampColumn.class, FilterTimestampColLessLongScalar.class, FilterLongScalarLessTimestampColumn.class, TimestampColLessDoubleScalar.class, DoubleScalarLessTimestampColumn.class, - FilterTimestampColLessDoubleScalar.class, FilterDoubleScalarLessTimestampColumn.class + FilterTimestampColLessDoubleScalar.class, FilterDoubleScalarLessTimestampColumn.class, + IntervalYearMonthScalarLessIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarLessIntervalYearMonthColumn.class, + IntervalYearMonthColLessIntervalYearMonthScalar.class, FilterIntervalYearMonthColLessIntervalYearMonthScalar.class, + IntervalDayTimeScalarLessIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarLessIntervalDayTimeColumn.class, + IntervalDayTimeColLessIntervalDayTimeScalar.class, FilterIntervalDayTimeColLessIntervalDayTimeScalar.class, + DateColLessDateScalar.class,FilterDateColLessDateScalar.class, + DateScalarLessDateColumn.class,FilterDateScalarLessDateColumn.class, }) public class GenericUDFOPLessThan extends GenericUDFBaseCompare { public GenericUDFOPLessThan(){ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java index 18fbb5a..d6a0c58 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPMinus.java @@ -31,7 +31,38 @@ LongScalarSubtractLongColumn.class, 
LongScalarSubtractDoubleColumn.class, DoubleScalarSubtractLongColumn.class, DoubleScalarSubtractDoubleColumn.class, DecimalColSubtractDecimalColumn.class, DecimalColSubtractDecimalScalar.class, - DecimalScalarSubtractDecimalColumn.class}) + DecimalScalarSubtractDecimalColumn.class, + IntervalYearMonthColSubtractIntervalYearMonthColumn.class, + IntervalYearMonthColSubtractIntervalYearMonthScalar.class, + IntervalYearMonthScalarSubtractIntervalYearMonthColumn.class, + IntervalDayTimeColSubtractIntervalDayTimeColumn.class, + IntervalDayTimeColSubtractIntervalDayTimeScalar.class, + IntervalDayTimeScalarSubtractIntervalDayTimeColumn.class, + TimestampColSubtractIntervalDayTimeColumn.class, + TimestampColSubtractIntervalDayTimeScalar.class, + TimestampScalarSubtractIntervalDayTimeColumn.class, + TimestampColSubtractTimestampColumn.class, + TimestampColSubtractTimestampScalar.class, + TimestampScalarSubtractTimestampColumn.class, + DateColSubtractDateColumn.class, + DateColSubtractDateScalar.class, + DateScalarSubtractDateColumn.class, + DateColSubtractTimestampColumn.class, + DateColSubtractTimestampScalar.class, + DateScalarSubtractTimestampColumn.class, + TimestampColSubtractDateColumn.class, + TimestampColSubtractDateScalar.class, + TimestampScalarSubtractDateColumn.class, + DateColSubtractIntervalDayTimeColumn.class, + DateColSubtractIntervalDayTimeScalar.class, + DateScalarSubtractIntervalDayTimeColumn.class, + DateColSubtractIntervalYearMonthColumn.class, + DateScalarSubtractIntervalYearMonthColumn.class, + DateColSubtractIntervalYearMonthScalar.class, + TimestampColSubtractIntervalYearMonthColumn.class, + TimestampScalarSubtractIntervalYearMonthColumn.class, + TimestampColSubtractIntervalYearMonthScalar.class, +}) public class GenericUDFOPMinus extends GenericUDFBaseArithmetic { public GenericUDFOPMinus() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java index 0436488..b5da57a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java @@ -55,7 +55,13 @@ TimestampColNotEqualLongScalar.class, LongScalarNotEqualTimestampColumn.class, FilterTimestampColNotEqualLongScalar.class, FilterLongScalarNotEqualTimestampColumn.class, TimestampColNotEqualDoubleScalar.class, DoubleScalarNotEqualTimestampColumn.class, - FilterTimestampColNotEqualDoubleScalar.class, FilterDoubleScalarNotEqualTimestampColumn.class + FilterTimestampColNotEqualDoubleScalar.class, FilterDoubleScalarNotEqualTimestampColumn.class, + IntervalYearMonthScalarNotEqualIntervalYearMonthColumn.class, FilterIntervalYearMonthScalarNotEqualIntervalYearMonthColumn.class, + IntervalYearMonthColNotEqualIntervalYearMonthScalar.class, FilterIntervalYearMonthColNotEqualIntervalYearMonthScalar.class, + IntervalDayTimeScalarNotEqualIntervalDayTimeColumn.class, FilterIntervalDayTimeScalarNotEqualIntervalDayTimeColumn.class, + IntervalDayTimeColNotEqualIntervalDayTimeScalar.class, FilterIntervalDayTimeColNotEqualIntervalDayTimeScalar.class, + DateColNotEqualDateScalar.class,FilterDateColNotEqualDateScalar.class, + DateScalarNotEqualDateColumn.class,FilterDateScalarNotEqualDateColumn.class, }) public class GenericUDFOPNotEqual extends GenericUDFBaseCompare { public GenericUDFOPNotEqual(){ diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java index bfac5a8..5755a99 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPPlus.java @@ -37,7 +37,38 @@ LongColAddDoubleScalar.class, DoubleColAddLongScalar.class, DoubleColAddDoubleScalar.class, LongScalarAddLongColumn.class, LongScalarAddDoubleColumn.class, DoubleScalarAddLongColumn.class, DoubleScalarAddDoubleColumn.class, DecimalScalarAddDecimalColumn.class, DecimalColAddDecimalColumn.class, - DecimalColAddDecimalScalar.class}) + DecimalColAddDecimalScalar.class, + IntervalYearMonthColAddIntervalYearMonthColumn.class, + IntervalYearMonthColAddIntervalYearMonthScalar.class, + IntervalYearMonthScalarAddIntervalYearMonthColumn.class, + IntervalDayTimeColAddIntervalDayTimeColumn.class, + IntervalDayTimeColAddIntervalDayTimeScalar.class, + IntervalDayTimeScalarAddIntervalDayTimeColumn.class, + IntervalDayTimeColAddTimestampColumn.class, + IntervalDayTimeColAddTimestampScalar.class, + IntervalDayTimeScalarAddTimestampColumn.class, + TimestampColAddIntervalDayTimeColumn.class, + TimestampColAddIntervalDayTimeScalar.class, + TimestampScalarAddIntervalDayTimeColumn.class, + DateColAddIntervalDayTimeColumn.class, + DateColAddIntervalDayTimeScalar.class, + DateScalarAddIntervalDayTimeColumn.class, + IntervalDayTimeColAddDateColumn.class, + IntervalDayTimeColAddDateScalar.class, + IntervalDayTimeScalarAddDateColumn.class, + IntervalYearMonthColAddDateColumn.class, + IntervalYearMonthColAddDateScalar.class, + IntervalYearMonthScalarAddDateColumn.class, + IntervalYearMonthColAddTimestampColumn.class, + IntervalYearMonthColAddTimestampScalar.class, + IntervalYearMonthScalarAddTimestampColumn.class, + DateColAddIntervalYearMonthColumn.class, + DateScalarAddIntervalYearMonthColumn.class, + DateColAddIntervalYearMonthScalar.class, + TimestampColAddIntervalYearMonthColumn.class, + TimestampScalarAddIntervalYearMonthColumn.class, + TimestampColAddIntervalYearMonthScalar.class +}) public class GenericUDFOPPlus extends GenericUDFBaseArithmetic { public GenericUDFOPPlus() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java index 89c3988..e644320 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalDayTime.java @@ -21,6 +21,8 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToIntervalDayTime; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -39,6 +41,7 @@ */ @Description(name = "interval_day_time", value = "CAST( AS INTERVAL DAY TO SECOND) - Returns the day-time interval represented by the string") +@VectorizedExpressions({CastStringToIntervalDayTime.class}) public class GenericUDFToIntervalDayTime extends GenericUDF { private transient PrimitiveObjectInspector argumentOI; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java index 5c05655..92a40f8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToIntervalYearMonth.java @@ -21,6 +21,8 @@ import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; +import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToIntervalYearMonth; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -39,6 +41,7 @@ */ @Description(name = "interval_year_month", value = "CAST( AS INTERVAL YEAR TO MONTH) - Returns the year-month interval represented by the string") +@VectorizedExpressions({CastStringToIntervalYearMonth.class}) public class GenericUDFToIntervalYearMonth extends GenericUDF { private transient PrimitiveObjectInspector argumentOI; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java new file mode 100644 index 0000000..ff28995 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java @@ -0,0 +1,214 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.util; + +import java.sql.Date; +import java.sql.Timestamp; +import java.util.Calendar; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; + +import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth; +import org.apache.hadoop.hive.common.type.HiveIntervalDayTime; +import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hive.common.util.DateUtils; + + +public class DateTimeMath { + + private static class NanosResult { + public int seconds; + public int nanos; + + public void addNanos(int leftNanos, int rightNanos) { + seconds = 0; + nanos = leftNanos + rightNanos; + if (nanos < 0) { + seconds = -1; + nanos += DateUtils.NANOS_PER_SEC; + } else if (nanos >= DateUtils.NANOS_PER_SEC) { + seconds = 1; + nanos -= DateUtils.NANOS_PER_SEC; + } + } + } + + protected Calendar calUtc = Calendar.getInstance(TimeZone.getTimeZone("UTC")); + protected Calendar calLocal = Calendar.getInstance(); + protected NanosResult nanosResult = new NanosResult(); + + // + // Operations involving/returning year-month intervals + // + + /** + * Perform month arithmetic to millis value using UTC time zone. 
+   * @param millis milliseconds since epoch
+   * @param months number of months to add (negative values subtract)
+   * @return the adjusted millis value
+   */
+  public long addMonthsToMillisUtc(long millis, int months) {
+    calUtc.setTimeInMillis(millis);
+    calUtc.add(Calendar.MONTH, months);
+    return calUtc.getTimeInMillis();
+  }
+
+  /**
+   * Perform month arithmetic on a millis value using the local time zone.
+   * @param millis milliseconds since epoch
+   * @param months number of months to add (negative values subtract)
+   * @return the adjusted millis value
+   */
+  public long addMonthsToMillisLocal(long millis, int months) {
+    calLocal.setTimeInMillis(millis);
+    calLocal.add(Calendar.MONTH, months);
+    return calLocal.getTimeInMillis();
+  }
+
+  public long addMonthsToNanosUtc(long nanos, int months) {
+    long result = addMonthsToMillisUtc(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
+    return result;
+  }
+
+  public long addMonthsToNanosLocal(long nanos, int months) {
+    long result = addMonthsToMillisLocal(nanos / 1000000, months) * 1000000 + (nanos % 1000000);
+    return result;
+  }
+
+  public long addMonthsToDays(long days, int months) {
+    long millis = DateWritable.daysToMillis((int) days);
+    millis = addMonthsToMillisLocal(millis, months);
+    // Convert millis result back to days
+    return DateWritable.millisToDays(millis);
+  }
+
+  public Timestamp add(Timestamp ts, HiveIntervalYearMonth interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    // Attempt to match Oracle semantics for timestamp arithmetic,
+    // where timestamp arithmetic is done in UTC, then converted back to local timezone
+    long resultMillis = addMonthsToMillisUtc(ts.getTime(), interval.getTotalMonths());
+    Timestamp tsResult = new Timestamp(resultMillis);
+    tsResult.setNanos(ts.getNanos());
+
+    return tsResult;
+  }
+
+  public Date add(Date dt, HiveIntervalYearMonth interval) {
+    if (dt == null || interval == null) {
+      return null;
+    }
+
+    // Since the Date millis value is in local time zone representation, do the date arithmetic
+    // in the local time zone so the time remains at the start of the day.
+    long resultMillis = addMonthsToMillisLocal(dt.getTime(), interval.getTotalMonths());
+    return new Date(resultMillis);
+  }
+
+  public HiveIntervalYearMonth add(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
+    HiveIntervalYearMonth result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    result = new HiveIntervalYearMonth(left.getTotalMonths() + right.getTotalMonths());
+    return result;
+  }
+
+  public Timestamp subtract(Timestamp left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public Date subtract(Date left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public HiveIntervalYearMonth subtract(HiveIntervalYearMonth left, HiveIntervalYearMonth right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  //
+  // Operations involving/returning day-time intervals
+  //
+
+  public Timestamp add(Timestamp ts, HiveIntervalDayTime interval) {
+    if (ts == null || interval == null) {
+      return null;
+    }
+
+    nanosResult.addNanos(ts.getNanos(), interval.getNanos());
+
+    long newMillis = ts.getTime()
+        + TimeUnit.SECONDS.toMillis(interval.getTotalSeconds() + nanosResult.seconds);
+    Timestamp tsResult = new Timestamp(newMillis);
+    tsResult.setNanos(nanosResult.nanos);
+    return tsResult;
+  }
+
+  public HiveIntervalDayTime add(HiveIntervalDayTime left, HiveIntervalDayTime right) {
+    HiveIntervalDayTime result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    nanosResult.addNanos(left.getNanos(), right.getNanos());
+
+    long totalSeconds = left.getTotalSeconds() + right.getTotalSeconds() + nanosResult.seconds;
+    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
+    return result;
+  }
+
+  public Timestamp subtract(Timestamp left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public HiveIntervalDayTime subtract(HiveIntervalDayTime left, HiveIntervalDayTime right) {
+    if (left == null || right == null) {
+      return null;
+    }
+    return add(left, right.negate());
+  }
+
+  public HiveIntervalDayTime subtract(Timestamp left, Timestamp right) {
+    HiveIntervalDayTime result = null;
+    if (left == null || right == null) {
+      return null;
+    }
+
+    nanosResult.addNanos(left.getNanos(), -(right.getNanos()));
+
+    long totalSeconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
+        - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + nanosResult.seconds;
+    result = new HiveIntervalDayTime(totalSeconds, nanosResult.nanos);
+    return result;
+  }
+}
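For intuition about the NanosResult borrow/carry above, here is a minimal standalone sketch of the same normalization. It is illustrative only, not part of the patch; it assumes DateUtils.NANOS_PER_SEC is 1,000,000,000, and the class and method names are ours:

// Illustrative sketch only -- mirrors DateTimeMath.NanosResult.addNanos with plain JDK types.
public class NanosCarryDemo {
  static final long NANOS_PER_SEC = 1_000_000_000L;

  /** Returns {carrySeconds, normalizedNanos} for the sum of two nano components. */
  static long[] addNanos(int leftNanos, int rightNanos) {
    long seconds = 0;
    long nanos = (long) leftNanos + rightNanos;
    if (nanos < 0) {                      // borrow one second
      seconds = -1;
      nanos += NANOS_PER_SEC;
    } else if (nanos >= NANOS_PER_SEC) {  // carry one second
      seconds = 1;
      nanos -= NANOS_PER_SEC;
    }
    return new long[] { seconds, nanos };
  }

  public static void main(String[] args) {
    // 0.5s + 0.501s: carries one second, leaving 0.001s
    long[] r = addNanos(500_000_000, 501_000_000);
    System.out.println(r[0] + "s carry, " + r[1] + "ns");   // 1s carry, 1000000ns
    // 0.2s + (-0.5s): borrows one second, leaving 0.7s
    r = addNanos(200_000_000, -500_000_000);
    System.out.println(r[0] + "s carry, " + r[1] + "ns");   // -1s carry, 700000000ns
  }
}

The carry/borrow seconds are then folded into the total-seconds arithmetic, which is why the add/subtract methods above never produce a nanos field outside [0, NANOS_PER_SEC).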
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
index efe2efe..becebd4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
@@ -151,7 +151,7 @@ public void testVectorExpressionDescriptor() {
     VectorUDFUnixTimeStampLong v1 = new VectorUDFUnixTimeStampLong();
     VectorExpressionDescriptor.Builder builder1 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d1 = builder1.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATETIME_FAMILY)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATETIME_INTERVAL_FAMILY)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
     assertTrue(d1.matches(v1.getDescriptor()));
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
new file mode 100644
index 0000000..35fe941
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/util/TestDateTimeMath.java
@@ -0,0 +1,464 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.util;
+
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.util.TimeZone;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.junit.*;
+
+import static org.junit.Assert.*;
+
+public class TestDateTimeMath {
+
+  @Test
+  public void testTimestampIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-0",
+        "2001-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "1-1",
+        "2002-02-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "10-0",
+        "2011-01-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.456", plus, "0-11",
+        "2001-12-01 01:02:03.456");
+    checkTimestampIntervalYearMonthArithmetic("2001-03-01 01:02:03.500", plus, "1-11",
+        "2003-02-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-1-1",
+        "1999-12-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.500", plus, "-0-0",
+        "2001-01-01 01:02:03.500");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", plus, "-0-0",
+        "2001-01-01 01:02:03.123456789");
+
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "0-0",
+        "2001-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "10-0",
+        "1991-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-10-0",
+        "2011-01-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "8-2",
+        "1992-11-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, "-8-2",
+        "2009-03-01 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03.123456789", minus, "8-2",
+        "1992-11-01 01:02:03.123456789");
+
+    checkTimestampIntervalYearMonthArithmetic(null, plus, "1-1",
+        null);
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, null,
+        null);
+    checkTimestampIntervalYearMonthArithmetic(null, minus, "1-1",
+        null);
+    checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", minus, null,
+        null);
+
+    // End of the month behavior
+    checkTimestampIntervalYearMonthArithmetic("2001-01-28 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-29 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-30 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-01-31 01:02:03", plus, "0-1",
+        "2001-02-28 01:02:03");
+    checkTimestampIntervalYearMonthArithmetic("2001-02-28 01:02:03", plus, "0-1",
+        "2001-03-28 01:02:03");
+
+    // Test that timestamp arithmetic is done in UTC and then converted back to local timezone,
+    // matching Oracle behavior.
+    TimeZone originalTz = TimeZone.getDefault();
+    try {
+      TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
+      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
+          "2001-07-01 02:02:03");
+      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
+          "2002-01-01 00:02:03");
+
+      TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+      checkTimestampIntervalYearMonthArithmetic("2001-01-01 01:02:03", plus, "0-6",
+          "2001-07-01 01:02:03");
+      checkTimestampIntervalYearMonthArithmetic("2001-07-01 01:02:03", plus, "0-6",
+          "2002-01-01 01:02:03");
+    } finally {
+      TimeZone.setDefault(originalTz);
+    }
+  }
+
+  @Test
+  public void testDateIntervalYearMonthArithmetic() throws Exception {
+    char plus = '+';
+    char minus = '-';
+
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "0-0", "2001-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "0-1", "2001-02-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "0-6", "2001-07-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "1-0", "2002-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", plus, "1-1", "2002-02-01");
+    checkDateIntervalYearMonthArithmetic("2001-10-10", plus, "1-6", "2003-04-10");
+    checkDateIntervalYearMonthArithmetic("2003-04-10", plus, "-1-6", "2001-10-10");
+
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "0-0", "2001-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "0-1", "2000-12-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "1-0", "2000-01-01");
+    checkDateIntervalYearMonthArithmetic("2001-01-01", minus, "1-1", "1999-12-01");
+    checkDateIntervalYearMonthArithmetic("2001-10-10", minus, "1-6", "2000-04-10");
+    checkDateIntervalYearMonthArithmetic("2003-04-10", minus, "-1-6", "2004-10-10");
+
+    // end of month behavior
+    checkDateIntervalYearMonthArithmetic("2001-01-28", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-29", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-30", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-31", plus, "0-1", "2001-02-28");
+    checkDateIntervalYearMonthArithmetic("2001-01-31", plus, "0-2", "2001-03-31");
+    checkDateIntervalYearMonthArithmetic("2001-02-28", plus, "0-1", "2001-03-28");
+    // leap year
+    checkDateIntervalYearMonthArithmetic("2004-01-28", plus, "0-1", "2004-02-28");
+    checkDateIntervalYearMonthArithmetic("2004-01-29", plus, "0-1", "2004-02-29");
+    checkDateIntervalYearMonthArithmetic("2004-01-30", plus, "0-1", "2004-02-29");
checkDateIntervalDayTimeArithmetic("2004-01-31", plus, "0-1", "2004-02-29"); + } + + @Test + public void testIntervalYearMonthArithmetic() throws Exception { + char plus = '+'; + char minus = '-'; + + checkIntervalYearMonthArithmetic("0-0", plus, "0-0", "0-0"); + checkIntervalYearMonthArithmetic("0-0", plus, "4-5", "4-5"); + checkIntervalYearMonthArithmetic("4-5", plus, "0-0", "4-5"); + checkIntervalYearMonthArithmetic("0-0", plus, "1-1", "1-1"); + checkIntervalYearMonthArithmetic("1-1", plus, "0-0", "1-1"); + + checkIntervalYearMonthArithmetic("0-0", minus, "0-0", "0-0"); + checkIntervalYearMonthArithmetic("0-0", minus, "1-0", "-1-0"); + checkIntervalYearMonthArithmetic("1-2", minus, "1-1", "0-1"); + checkIntervalYearMonthArithmetic("0-0", minus, "1-1", "-1-1"); + checkIntervalYearMonthArithmetic("-1-1", minus, "1-1", "-2-2"); + checkIntervalYearMonthArithmetic("-1-1", minus, "-1-1", "0-0"); + + checkIntervalYearMonthArithmetic(null, plus, "1-1", null); + checkIntervalYearMonthArithmetic("1-1", plus, null, null); + checkIntervalYearMonthArithmetic(null, minus, "1-1", null); + checkIntervalYearMonthArithmetic("1-1", minus, null, null); + } + + @Test + public void testTimestampIntervalDayTimeArithmetic() throws Exception { + char plus = '+'; + char minus = '-'; + + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1", + "2001-01-02 02:03:04"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1", + "2001-01-02 02:03:04.456"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555", + "2001-01-02 02:03:05.011"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, "1 1:1:1.555555555", + "2001-01-02 02:03:04.555555555"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.456", plus, "1 1:1:1.555555555", + "2001-01-02 02:03:05.011555555"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.499", + "2001-01-02 02:03:04.999"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.500", + "2001-01-02 02:03:05.0"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500", plus, "1 1:1:1.501", + "2001-01-02 02:03:05.001"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.4999999999", + "2001-01-02 02:03:04.999999999"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500", + "2001-01-02 02:03:05.0"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03.500000000", plus, "1 1:1:1.500000001", + "2001-01-02 02:03:05.000000001"); + + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 01:02:03", + "2001-01-01 00:00:00"); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, "0 0:0:0", + "2001-01-01 01:02:03"); + + checkTsIntervalDayTimeArithmetic(null, plus, "1 1:1:1.555555555", + null); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", plus, null, + null); + checkTsIntervalDayTimeArithmetic(null, minus, "1 1:1:1.555555555", + null); + checkTsIntervalDayTimeArithmetic("2001-01-01 01:02:03", minus, null, + null); + + // Try some time zone boundaries + TimeZone originalTz = TimeZone.getDefault(); + try { + // America/Los_Angeles DST dates - 2015-03-08 02:00:00/2015-11-01 02:00:00 + TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); + + checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01", + "2015-03-08 01:59:59"); + checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01", + "2015-03-08 03:00:00"); + 
checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:01", + "2015-03-08 01:59:59"); + checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.005", + "2015-03-08 03:00:00"); + checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59.995", plus, "0 0:0:0.0051", + "2015-03-08 03:00:00.0001"); + checkTsIntervalDayTimeArithmetic("2015-03-08 03:00:00", minus, "0 0:0:0.005", + "2015-03-08 01:59:59.995"); + checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:58", plus, "0 0:0:01", + "2015-11-01 01:59:59"); + checkTsIntervalDayTimeArithmetic("2015-11-01 01:59:59", plus, "0 0:0:01", + "2015-11-01 02:00:00"); + + // UTC has no such adjustment + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:58", plus, "0 0:0:01", + "2015-03-08 01:59:59"); + checkTsIntervalDayTimeArithmetic("2015-03-08 01:59:59", plus, "0 0:0:01", + "2015-03-08 02:00:00"); + } finally { + TimeZone.setDefault(originalTz); + } + } + + @Test + public void testIntervalDayTimeArithmetic() throws Exception { + char plus = '+'; + char minus = '-'; + + checkIntervalDayTimeArithmetic("0 0:0:0", plus, "0 0:0:0", "0 0:0:0"); + checkIntervalDayTimeArithmetic("0 01:02:03", plus, "6 0:0:0.0001", "6 01:02:03.0001"); + checkIntervalDayTimeArithmetic("6 0:0:0.0001", plus, "0 01:02:03", "6 01:02:03.0001"); + checkIntervalDayTimeArithmetic("0 01:02:03", plus, "1 10:10:10.0001", "1 11:12:13.0001"); + checkIntervalDayTimeArithmetic("1 10:10:10.0001", plus, "0 01:02:03", "1 11:12:13.0001"); + checkIntervalDayTimeArithmetic("0 0:0:0.900000000", plus, "0 0:0:0.099999999", "0 0:0:0.999999999"); + checkIntervalDayTimeArithmetic("0 0:0:0.900000001", plus, "0 0:0:0.099999999", "0 0:0:1"); + checkIntervalDayTimeArithmetic("0 0:0:0.900000002", plus, "0 0:0:0.099999999", "0 0:0:1.000000001"); + + checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0", "0 0:0:0"); + checkIntervalDayTimeArithmetic("0 0:0:0", minus, "0 0:0:0.123", "-0 0:0:0.123"); + checkIntervalDayTimeArithmetic("3 4:5:6.789", minus, "1 1:1:1.111", "2 3:4:5.678"); + checkIntervalDayTimeArithmetic("0 0:0:0.0", minus, "1 1:1:1.111", "-1 1:1:1.111"); + checkIntervalDayTimeArithmetic("-1 1:1:1.222", minus, "1 1:1:1.111", "-2 2:2:2.333"); + checkIntervalDayTimeArithmetic("-1 1:1:1.111", minus, "-1 1:1:1.111", "0 0:0:0"); + + checkIntervalDayTimeArithmetic(null, plus, "1 1:1:1.111", null); + checkIntervalDayTimeArithmetic("1 1:1:1.111", plus, null, null); + checkIntervalDayTimeArithmetic(null, minus, "1 1:1:1.111", null); + checkIntervalDayTimeArithmetic("1 1:1:1.111", minus, null, null); + } + + @Test + public void testTimestampSubtraction() throws Exception { + checkTsArithmetic("2001-01-01 00:00:00", "2001-01-01 00:00:00", "0 0:0:0"); + checkTsArithmetic("2002-02-02 01:01:01", "2001-01-01 00:00:00", "397 1:1:1"); + checkTsArithmetic("2001-01-01 00:00:00", "2002-02-02 01:01:01", "-397 1:1:1"); + checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 00:00:00", "1 0:0:0"); + checkTsArithmetic("2014-12-31 00:00:00", "2015-01-01 00:00:00", "-1 0:0:0"); + checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59", "0 0:0:01"); + checkTsArithmetic("2014-12-31 23:59:59", "2015-01-01 00:00:00", "-0 0:0:01"); + checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 23:59:59.9999", "0 0:0:00.0001"); + checkTsArithmetic("2014-12-31 23:59:59.9999", "2015-01-01 00:00:00", "-0 0:0:00.0001"); + checkTsArithmetic("2015-01-01 00:00:00", "2014-12-31 11:12:13.000000001", "0 12:47:46.999999999"); + checkTsArithmetic("2014-12-31 
11:12:13.000000001", "2015-01-01 00:00:00", "-0 12:47:46.999999999"); + + // Test that timestamp arithmetic is done in UTC and then converted back to local timezone, + // matching Oracle behavior. + TimeZone originalTz = TimeZone.getDefault(); + try { + TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); + checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 1:0:0"); + checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 1:0:0"); + checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 1:0:0"); + checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 1:0:0"); + + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + checkTsArithmetic("1999-12-15 00:00:00", "1999-09-15 00:00:00", "91 0:0:0"); + checkTsArithmetic("1999-09-15 00:00:00", "1999-12-15 00:00:00", "-91 0:0:0"); + checkTsArithmetic("1999-12-15 00:00:00", "1995-09-15 00:00:00", "1552 0:0:0"); + checkTsArithmetic("1995-09-15 00:00:00", "1999-12-15 00:00:00", "-1552 0:0:0"); + } finally { + TimeZone.setDefault(originalTz); + } + } + + private static void checkTimestampIntervalYearMonthArithmetic( + String left, char operationType, String right, String expected) throws Exception { + Timestamp leftTs = null; + if (left != null) { + leftTs = Timestamp.valueOf(left); + } + HiveIntervalYearMonth rightInterval = null; + if (right != null) { + rightInterval = HiveIntervalYearMonth.valueOf(right); + } + Timestamp expectedResult = null; + if (expected != null) { + expectedResult = Timestamp.valueOf(expected); + } + Timestamp testResult = null; + + DateTimeMath dtm = new DateTimeMath(); + switch (operationType) { + case '-': + testResult = dtm.subtract(leftTs, rightInterval); + break; + case '+': + testResult = dtm.add(leftTs, rightInterval); + break; + default: + throw new IllegalArgumentException("Invalid operation " + operationType); + } + + assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval), + expectedResult, testResult); + } + + private static void checkDateIntervalDayTimeArithmetic( + String left, char operationType, String right, String expected) throws Exception { + Date leftDt = null; + if (left != null) { + leftDt = Date.valueOf(left); + } + HiveIntervalYearMonth rightInterval = null; + if (right != null) { + rightInterval = HiveIntervalYearMonth.valueOf(right); + } + Date expectedResult = null; + if (expected != null) { + expectedResult = Date.valueOf(expected); + } + Date testResult = null; + + DateTimeMath dtm = new DateTimeMath(); + switch (operationType) { + case '-': + testResult = dtm.subtract(leftDt, rightInterval); + break; + case '+': + testResult = dtm.add(leftDt, rightInterval); + break; + default: + throw new IllegalArgumentException("Invalid operation " + operationType); + } + + assertEquals(String.format("%s %s %s", leftDt, operationType, rightInterval), + expectedResult, testResult); + } + + private static void checkIntervalYearMonthArithmetic( + String left, char operationType, String right, String expected) throws Exception { + HiveIntervalYearMonth leftInterval = left == null ? null: HiveIntervalYearMonth.valueOf(left); + HiveIntervalYearMonth rightInterval = right == null ? null : HiveIntervalYearMonth.valueOf(right); + HiveIntervalYearMonth expectedResult = expected == null ? 
null : HiveIntervalYearMonth.valueOf(expected); + HiveIntervalYearMonth testResult = null; + + DateTimeMath dtm = new DateTimeMath(); + switch (operationType) { + case '-': + testResult = dtm.subtract(leftInterval, rightInterval); + break; + case '+': + testResult = dtm.add(leftInterval, rightInterval); + break; + default: + throw new IllegalArgumentException("Invalid operation " + operationType); + } + + assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval), + expectedResult, testResult); + } + + private static void checkTsIntervalDayTimeArithmetic( + String left, char operationType, String right, String expected) throws Exception { + Timestamp leftTs = null; + if (left != null) { + leftTs = Timestamp.valueOf(left); + } + HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right); + Timestamp expectedResult = null; + if (expected != null) { + expectedResult = Timestamp.valueOf(expected); + } + Timestamp testResult = null; + + DateTimeMath dtm = new DateTimeMath(); + switch (operationType) { + case '-': + testResult = dtm.subtract(leftTs, rightInterval); + break; + case '+': + testResult = dtm.add(leftTs, rightInterval); + break; + default: + throw new IllegalArgumentException("Invalid operation " + operationType); + } + + assertEquals(String.format("%s %s %s", leftTs, operationType, rightInterval), + expectedResult, testResult); + } + + private static void checkIntervalDayTimeArithmetic( + String left, char operationType, String right, String expected) throws Exception { + HiveIntervalDayTime leftInterval = left == null ? null : HiveIntervalDayTime.valueOf(left); + HiveIntervalDayTime rightInterval = right == null ? null : HiveIntervalDayTime.valueOf(right); + HiveIntervalDayTime expectedResult = expected == null ? 
+        null : HiveIntervalDayTime.valueOf(expected);
+    HiveIntervalDayTime testResult = null;
+
+    DateTimeMath dtm = new DateTimeMath();
+    switch (operationType) {
+      case '-':
+        testResult = dtm.subtract(leftInterval, rightInterval);
+        break;
+      case '+':
+        testResult = dtm.add(leftInterval, rightInterval);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid operation " + operationType);
+    }
+
+    assertEquals(String.format("%s %s %s", leftInterval, operationType, rightInterval),
+        expectedResult, testResult);
+  }
+
+  private static void checkTsArithmetic(
+      String left, String right, String expected) throws Exception {
+    Timestamp leftTs = null;
+    if (left != null) {
+      leftTs = Timestamp.valueOf(left);
+    }
+    Timestamp rightTs = null;
+    if (right != null) {
+      rightTs = Timestamp.valueOf(right);
+    }
+    HiveIntervalDayTime expectedResult = null;
+    if (expected != null) {
+      expectedResult = HiveIntervalDayTime.valueOf(expected);
+    }
+    DateTimeMath dtm = new DateTimeMath();
+    HiveIntervalDayTime testResult =
+        dtm.subtract(leftTs, rightTs);
+
+    assertEquals(String.format("%s - %s", leftTs, rightTs),
+        expectedResult, testResult);
+  }
+}
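The checkTsArithmetic helper above feeds DateTimeMath.subtract(Timestamp, Timestamp), which splits the difference into whole seconds plus a normalized nanos remainder. A small plain-JDK sketch (illustrative only, not part of the patch; the class and variable names are ours) reproduces the "0 12:47:46.999999999" expectation from testTimestampSubtraction:

// Illustrative sketch only -- approximates DateTimeMath.subtract(Timestamp, Timestamp).
import java.sql.Timestamp;
import java.util.concurrent.TimeUnit;

public class TimestampDiffDemo {
  public static void main(String[] args) {
    Timestamp left = Timestamp.valueOf("2015-01-01 00:00:00");
    Timestamp right = Timestamp.valueOf("2014-12-31 11:12:13");
    right.setNanos(1);  // 2014-12-31 11:12:13.000000001

    // Nanos difference with a borrow, as in DateTimeMath.NanosResult
    long nanos = (long) left.getNanos() - right.getNanos();
    long borrow = 0;
    if (nanos < 0) {
      borrow = -1;
      nanos += 1_000_000_000L;
    }

    long seconds = TimeUnit.MILLISECONDS.toSeconds(left.getTime())
        - TimeUnit.MILLISECONDS.toSeconds(right.getTime()) + borrow;
    // 46066s 999999999ns, i.e. 12:47:46.999999999
    System.out.println(seconds + "s " + nanos + "ns");
  }
}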
diff --git a/ql/src/test/queries/clientpositive/vector_date_1.q b/ql/src/test/queries/clientpositive/vector_date_1.q
new file mode 100644
index 0000000..908c082
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_date_1.q
@@ -0,0 +1,184 @@
+
+set hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=minimal;
+
+drop table if exists vector_date_1;
+create table vector_date_1 (dt1 date, dt2 date) stored as orc;
+
+insert into table vector_date_1
+  select null, null from src limit 1;
+insert into table vector_date_1
+  select date '1999-12-31', date '2000-01-01' from src limit 1;
+insert into table vector_date_1
+  select date '2001-01-01', date '2001-06-01' from src limit 1;
+
+-- column-to-column comparison in select clause
+explain
+select
+  dt1, dt2,
+  -- should be all true
+  dt1 = dt1,
+  dt1 != dt2,
+  dt1 <= dt1,
+  dt1 <= dt2,
+  dt1 < dt2,
+  dt2 >= dt2,
+  dt2 >= dt1,
+  dt2 > dt1
+from vector_date_1 order by dt1;
+
+select
+  dt1, dt2,
+  -- should be all true
+  dt1 = dt1,
+  dt1 != dt2,
+  dt1 <= dt1,
+  dt1 <= dt2,
+  dt1 < dt2,
+  dt2 >= dt2,
+  dt2 >= dt1,
+  dt2 > dt1
+from vector_date_1 order by dt1;
+
+explain
+select
+  dt1, dt2,
+  -- should be all false
+  dt1 != dt1,
+  dt1 = dt2,
+  dt1 < dt1,
+  dt1 >= dt2,
+  dt1 > dt2,
+  dt2 > dt2,
+  dt2 <= dt1,
+  dt2 < dt1
+from vector_date_1 order by dt1;
+
+select
+  dt1, dt2,
+  -- should be all false
+  dt1 != dt1,
+  dt1 = dt2,
+  dt1 < dt1,
+  dt1 >= dt2,
+  dt1 > dt2,
+  dt2 > dt2,
+  dt2 <= dt1,
+  dt2 < dt1
+from vector_date_1 order by dt1;
+
+-- column-to-literal/literal-to-column comparison in select clause
+explain
+select
+  dt1,
+  -- should be all true
+  dt1 != date '1970-01-01',
+  dt1 >= date '1970-01-01',
+  dt1 > date '1970-01-01',
+  dt1 <= date '2100-01-01',
+  dt1 < date '2100-01-01',
+  date '1970-01-01' != dt1,
+  date '1970-01-01' <= dt1,
+  date '1970-01-01' < dt1
+from vector_date_1 order by dt1;
+
+select
+  dt1,
+  -- should be all true
+  dt1 != date '1970-01-01',
+  dt1 >= date '1970-01-01',
+  dt1 > date '1970-01-01',
+  dt1 <= date '2100-01-01',
+  dt1 < date '2100-01-01',
+  date '1970-01-01' != dt1,
+  date '1970-01-01' <= dt1,
+  date '1970-01-01' < dt1
+from vector_date_1 order by dt1;
+
+explain
+select
+  dt1,
+  -- should all be false
+  dt1 = date '1970-01-01',
+  dt1 <= date '1970-01-01',
+  dt1 < date '1970-01-01',
+  dt1 >= date '2100-01-01',
+  dt1 > date '2100-01-01',
+  date '1970-01-01' = dt1,
+  date '1970-01-01' >= dt1,
+  date '1970-01-01' > dt1
+from vector_date_1 order by dt1;
+
+select
+  dt1,
+  -- should all be false
+  dt1 = date '1970-01-01',
+  dt1 <= date '1970-01-01',
+  dt1 < date '1970-01-01',
+  dt1 >= date '2100-01-01',
+  dt1 > date '2100-01-01',
+  date '1970-01-01' = dt1,
+  date '1970-01-01' >= dt1,
+  date '1970-01-01' > dt1
+from vector_date_1 order by dt1;
+
+
+-- column-to-column comparisons in predicate
+-- all rows with non-null dt1 should be returned
+explain
+select
+  dt1, dt2
+from vector_date_1
+where
+  dt1 = dt1
+  and dt1 != dt2
+  and dt1 < dt2
+  and dt1 <= dt2
+  and dt2 > dt1
+  and dt2 >= dt1
+order by dt1;
+
+select
+  dt1, dt2
+from vector_date_1
+where
+  dt1 = dt1
+  and dt1 != dt2
+  and dt1 < dt2
+  and dt1 <= dt2
+  and dt2 > dt1
+  and dt2 >= dt1
+order by dt1;
+
+-- column-to-literal/literal-to-column comparison in predicate
+-- only a single row should be returned
+explain
+select
+  dt1, dt2
+from vector_date_1
+where
+  dt1 = date '2001-01-01'
+  and date '2001-01-01' = dt1
+  and dt1 != date '1970-01-01'
+  and date '1970-01-01' != dt1
+  and dt1 > date '1970-01-01'
+  and dt1 >= date '1970-01-01'
+  and date '1970-01-01' < dt1
+  and date '1970-01-01' <= dt1
+order by dt1;
+
+select
+  dt1, dt2
+from vector_date_1
+where
+  dt1 = date '2001-01-01'
+  and date '2001-01-01' = dt1
+  and dt1 != date '1970-01-01'
+  and date '1970-01-01' != dt1
+  and dt1 > date '1970-01-01'
+  and dt1 >= date '1970-01-01'
+  and date '1970-01-01' < dt1
+  and date '1970-01-01' <= dt1
+order by dt1;
+
+drop table vector_date_1;
diff --git a/ql/src/test/queries/clientpositive/vector_interval_1.q b/ql/src/test/queries/clientpositive/vector_interval_1.q
new file mode 100644
index 0000000..1f3e620
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_interval_1.q
@@ -0,0 +1,196 @@
+
+set hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=minimal;
+
+drop table if exists vector_interval_1;
+create table vector_interval_1 (ts timestamp, dt date, str1 string, str2 string) stored as orc;
+
+insert into vector_interval_1
+  select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1 2:3:4' from src limit 1;
+insert into vector_interval_1
+  select null, null, null, null from src limit 1;
+
+-- constants/cast from string
+explain
+select
+  str1,
+  interval '1-2' year to month, interval_year_month(str1),
+  interval '1 2:3:4' day to second, interval_day_time(str2)
+from vector_interval_1 order by str1;
+
+select
+  str1,
+  interval '1-2' year to month, interval_year_month(str1),
+  interval '1 2:3:4' day to second, interval_day_time(str2)
+from vector_interval_1 order by str1;
+
+
+-- interval arithmetic
+explain
+select
+  dt,
+  interval '1-2' year to month + interval '1-2' year to month,
+  interval_year_month(str1) + interval_year_month(str1),
+  interval '1-2' year to month + interval_year_month(str1),
+  interval '1-2' year to month - interval '1-2' year to month,
+  interval_year_month(str1) - interval_year_month(str1),
+  interval '1-2' year to month - interval_year_month(str1)
+from vector_interval_1 order by dt;
+
+select
+  dt,
+  interval '1-2' year to month + interval '1-2' year to month,
+  interval_year_month(str1) + interval_year_month(str1),
+  interval '1-2' year to month + interval_year_month(str1),
+  interval '1-2' year to month - interval '1-2' year to month,
+  interval_year_month(str1) - interval_year_month(str1),
+  interval '1-2' year to month - interval_year_month(str1)
+from vector_interval_1 order by dt;
+
+explain
+select
+  dt,
+  interval '1 
2:3:4' day to second + interval '1 2:3:4' day to second, + interval_day_time(str2) + interval_day_time(str2), + interval '1 2:3:4' day to second + interval_day_time(str2), + interval '1 2:3:4' day to second - interval '1 2:3:4' day to second, + interval_day_time(str2) - interval_day_time(str2), + interval '1 2:3:4' day to second - interval_day_time(str2) +from vector_interval_1 order by dt; + +select + dt, + interval '1 2:3:4' day to second + interval '1 2:3:4' day to second, + interval_day_time(str2) + interval_day_time(str2), + interval '1 2:3:4' day to second + interval_day_time(str2), + interval '1 2:3:4' day to second - interval '1 2:3:4' day to second, + interval_day_time(str2) - interval_day_time(str2), + interval '1 2:3:4' day to second - interval_day_time(str2) +from vector_interval_1 order by dt; + + +-- date-interval arithmetic +explain +select + dt, + dt + interval '1-2' year to month, + dt + interval_year_month(str1), + interval '1-2' year to month + dt, + interval_year_month(str1) + dt, + dt - interval '1-2' year to month, + dt - interval_year_month(str1), + dt + interval '1 2:3:4' day to second, + dt + interval_day_time(str2), + interval '1 2:3:4' day to second + dt, + interval_day_time(str2) + dt, + dt - interval '1 2:3:4' day to second, + dt - interval_day_time(str2) +from vector_interval_1 order by dt; + +select + dt, + dt + interval '1-2' year to month, + dt + interval_year_month(str1), + interval '1-2' year to month + dt, + interval_year_month(str1) + dt, + dt - interval '1-2' year to month, + dt - interval_year_month(str1), + dt + interval '1 2:3:4' day to second, + dt + interval_day_time(str2), + interval '1 2:3:4' day to second + dt, + interval_day_time(str2) + dt, + dt - interval '1 2:3:4' day to second, + dt - interval_day_time(str2) +from vector_interval_1 order by dt; + + +-- timestamp-interval arithmetic +explain +select + ts, + ts + interval '1-2' year to month, + ts + interval_year_month(str1), + interval '1-2' year to month + ts, + interval_year_month(str1) + ts, + ts - interval '1-2' year to month, + ts - interval_year_month(str1), + ts + interval '1 2:3:4' day to second, + ts + interval_day_time(str2), + interval '1 2:3:4' day to second + ts, + interval_day_time(str2) + ts, + ts - interval '1 2:3:4' day to second, + ts - interval_day_time(str2) +from vector_interval_1 order by ts; + +select + ts, + ts + interval '1-2' year to month, + ts + interval_year_month(str1), + interval '1-2' year to month + ts, + interval_year_month(str1) + ts, + ts - interval '1-2' year to month, + ts - interval_year_month(str1), + ts + interval '1 2:3:4' day to second, + ts + interval_day_time(str2), + interval '1 2:3:4' day to second + ts, + interval_day_time(str2) + ts, + ts - interval '1 2:3:4' day to second, + ts - interval_day_time(str2) +from vector_interval_1 order by ts; + + +-- timestamp-timestamp arithmetic +explain +select + ts, + ts - ts, + timestamp '2001-01-01 01:02:03' - ts, + ts - timestamp '2001-01-01 01:02:03' +from vector_interval_1 order by ts; + +select + ts, + ts - ts, + timestamp '2001-01-01 01:02:03' - ts, + ts - timestamp '2001-01-01 01:02:03' +from vector_interval_1 order by ts; + + +-- date-date arithmetic +explain +select + dt, + dt - dt, + date '2001-01-01' - dt, + dt - date '2001-01-01' +from vector_interval_1 order by dt; + +select + dt, + dt - dt, + date '2001-01-01' - dt, + dt - date '2001-01-01' +from vector_interval_1 order by dt; + + +-- date-timestamp arithmetic +explain +select + dt, + ts - dt, + timestamp '2001-01-01 01:02:03' - dt, + ts - 
date '2001-01-01', + dt - ts, + dt - timestamp '2001-01-01 01:02:03', + date '2001-01-01' - ts +from vector_interval_1 order by dt; + +select + dt, + ts - dt, + timestamp '2001-01-01 01:02:03' - dt, + ts - date '2001-01-01', + dt - ts, + dt - timestamp '2001-01-01 01:02:03', + date '2001-01-01' - ts +from vector_interval_1 order by dt; diff --git a/ql/src/test/queries/clientpositive/vector_interval_2.q b/ql/src/test/queries/clientpositive/vector_interval_2.q new file mode 100644 index 0000000..467e5f2 --- /dev/null +++ b/ql/src/test/queries/clientpositive/vector_interval_2.q @@ -0,0 +1,530 @@ +set hive.vectorized.execution.enabled=true; +set hive.fetch.task.conversion=minimal; + +drop table if exists vector_interval_2; +create table vector_interval_2 (ts timestamp, dt date, str1 string, str2 string, str3 string, str4 string) stored as orc; + +insert into vector_interval_2 + select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1-3', '1 2:3:4', '1 2:3:5' from src limit 1; +insert into vector_interval_2 + select null, null, null, null, null, null from src limit 1; + + +-- interval comparisons in select clause + +explain +select + str1, + -- Should all be true + interval_year_month(str1) = interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str2), + interval_year_month(str1) < interval_year_month(str2), + interval_year_month(str1) >= interval_year_month(str1), + interval_year_month(str2) >= interval_year_month(str1), + interval_year_month(str2) > interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str2), + + interval_year_month(str1) = interval '1-2' year to month, + interval_year_month(str1) <= interval '1-2' year to month, + interval_year_month(str1) <= interval '1-3' year to month, + interval_year_month(str1) < interval '1-3' year to month, + interval_year_month(str1) >= interval '1-2' year to month, + interval_year_month(str2) >= interval '1-2' year to month, + interval_year_month(str2) > interval '1-2' year to month, + interval_year_month(str1) != interval '1-3' year to month, + + interval '1-2' year to month = interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str2), + interval '1-2' year to month < interval_year_month(str2), + interval '1-2' year to month >= interval_year_month(str1), + interval '1-3' year to month >= interval_year_month(str1), + interval '1-3' year to month > interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str2) +from vector_interval_2 order by str1; + +select + str1, + -- Should all be true + interval_year_month(str1) = interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str2), + interval_year_month(str1) < interval_year_month(str2), + interval_year_month(str1) >= interval_year_month(str1), + interval_year_month(str2) >= interval_year_month(str1), + interval_year_month(str2) > interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str2), + + interval_year_month(str1) = interval '1-2' year to month, + interval_year_month(str1) <= interval '1-2' year to month, + interval_year_month(str1) <= interval '1-3' year to month, + interval_year_month(str1) < interval '1-3' year to month, + interval_year_month(str1) >= interval '1-2' year to month, + interval_year_month(str2) >= interval '1-2' year to month, + 
diff --git a/ql/src/test/queries/clientpositive/vector_interval_2.q b/ql/src/test/queries/clientpositive/vector_interval_2.q
new file mode 100644
index 0000000..467e5f2
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_interval_2.q
@@ -0,0 +1,530 @@
+set hive.vectorized.execution.enabled=true;
+set hive.fetch.task.conversion=minimal;
+
+drop table if exists vector_interval_2;
+create table vector_interval_2 (ts timestamp, dt date, str1 string, str2 string, str3 string, str4 string) stored as orc;
+
+insert into vector_interval_2
+  select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1-3', '1 2:3:4', '1 2:3:5' from src limit 1;
+insert into vector_interval_2
+  select null, null, null, null, null, null from src limit 1;
+
+
+-- interval comparisons in select clause
+
+explain
+select
+  str1,
+  -- Should all be true
+  interval_year_month(str1) = interval_year_month(str1),
+  interval_year_month(str1) <= interval_year_month(str1),
+  interval_year_month(str1) <= interval_year_month(str2),
+  interval_year_month(str1) < interval_year_month(str2),
+  interval_year_month(str1) >= interval_year_month(str1),
+  interval_year_month(str2) >= interval_year_month(str1),
+  interval_year_month(str2) > interval_year_month(str1),
+  interval_year_month(str1) != interval_year_month(str2),
+
+  interval_year_month(str1) = interval '1-2' year to month,
+  interval_year_month(str1) <= interval '1-2' year to month,
+  interval_year_month(str1) <= interval '1-3' year to month,
+  interval_year_month(str1) < interval '1-3' year to month,
+  interval_year_month(str1) >= interval '1-2' year to month,
+  interval_year_month(str2) >= interval '1-2' year to month,
+  interval_year_month(str2) > interval '1-2' year to month,
+  interval_year_month(str1) != interval '1-3' year to month,
+
+  interval '1-2' year to month = interval_year_month(str1),
+  interval '1-2' year to month <= interval_year_month(str1),
+  interval '1-2' year to month <= interval_year_month(str2),
+  interval '1-2' year to month < interval_year_month(str2),
+  interval '1-2' year to month >= interval_year_month(str1),
+  interval '1-3' year to month >= interval_year_month(str1),
+  interval '1-3' year to month > interval_year_month(str1),
+  interval '1-2' year to month != interval_year_month(str2)
+from vector_interval_2 order by str1;
+
+select
+  str1,
+  -- Should all be true
+  interval_year_month(str1) = interval_year_month(str1),
+  interval_year_month(str1) <= interval_year_month(str1),
+  interval_year_month(str1) <= interval_year_month(str2),
+  interval_year_month(str1) < interval_year_month(str2),
+  interval_year_month(str1) >= interval_year_month(str1),
+  interval_year_month(str2) >= interval_year_month(str1),
+  interval_year_month(str2) > interval_year_month(str1),
+  interval_year_month(str1) != interval_year_month(str2),
+
+  interval_year_month(str1) = interval '1-2' year to month,
+  interval_year_month(str1) <= interval '1-2' year to month,
+  interval_year_month(str1) <= interval '1-3' year to month,
+  interval_year_month(str1) < interval '1-3' year to month,
+  interval_year_month(str1) >= interval '1-2' year to month,
+  interval_year_month(str2) >= interval '1-2' year to month,
+  interval_year_month(str2) > interval '1-2' year to month,
+  interval_year_month(str1) != interval '1-3' year to month,
+
+  interval '1-2' year to month = interval_year_month(str1),
+  interval '1-2' year to month <= interval_year_month(str1),
+  interval '1-2' year to month <= interval_year_month(str2),
+  interval '1-2' year to month < interval_year_month(str2),
+  interval '1-2' year to month >= interval_year_month(str1),
+  interval '1-3' year to month >= interval_year_month(str1),
+  interval '1-3' year to month > interval_year_month(str1),
+  interval '1-2' year to month != interval_year_month(str2)
+from vector_interval_2 order by str1;
+
+explain
+select
+  str1,
+  -- Should all be false
+  interval_year_month(str1) != interval_year_month(str1),
+  interval_year_month(str1) >= interval_year_month(str2),
+  interval_year_month(str1) > interval_year_month(str2),
+  interval_year_month(str2) <= interval_year_month(str1),
+  interval_year_month(str2) < interval_year_month(str1),
+  interval_year_month(str1) != interval_year_month(str1),
+
+  interval_year_month(str1) != interval '1-2' year to month,
+  interval_year_month(str1) >= interval '1-3' year to month,
+  interval_year_month(str1) > interval '1-3' year to month,
+  interval_year_month(str2) <= interval '1-2' year to month,
+  interval_year_month(str2) < interval '1-2' year to month,
+  interval_year_month(str1) != interval '1-2' year to month,
+
+  interval '1-2' year to month != interval_year_month(str1),
+  interval '1-2' year to month >= interval_year_month(str2),
+  interval '1-2' year to month > interval_year_month(str2),
+  interval '1-3' year to month <= interval_year_month(str1),
+  interval '1-3' year to month < interval_year_month(str1),
+  interval '1-2' year to month != interval_year_month(str1)
+from vector_interval_2 order by str1;
+
+select
+  str1,
+  -- Should all be false
+  interval_year_month(str1) != interval_year_month(str1),
+  interval_year_month(str1) >= interval_year_month(str2),
+  interval_year_month(str1) > interval_year_month(str2),
+  interval_year_month(str2) <= interval_year_month(str1),
+  interval_year_month(str2) < interval_year_month(str1),
+  interval_year_month(str1) != interval_year_month(str1),
+
+  interval_year_month(str1) != interval '1-2' year to month,
+  interval_year_month(str1) >= interval '1-3' year to month,
+  interval_year_month(str1) > interval '1-3' year to month,
+  interval_year_month(str2) <= interval '1-2' year to month,
+  interval_year_month(str2) < interval '1-2' year to month,
+  interval_year_month(str1) != interval '1-2' year to month,
+
+  interval '1-2' year to month != interval_year_month(str1),
+  interval '1-2' year to month >= interval_year_month(str2),
+  interval '1-2' year to month > interval_year_month(str2),
+  interval '1-3' year to month <= interval_year_month(str1),
+  interval '1-3' year to month < interval_year_month(str1),
+  interval '1-2' year to month != interval_year_month(str1)
+from vector_interval_2 order by str1;
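
A year-month interval reduces to a single month count, so the comparisons above behave like
integer comparisons: interval '1-2' is 14 months and interval '1-3' is 15. A sketch of the
intent, assuming the same seeded values:

    select
      interval '1-2' year to month < interval '1-3' year to month  -- expected: true, 14 < 15 months
    from vector_interval_2;
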
+
+explain
+select
+  str3,
+  -- Should all be true
+  interval_day_time(str3) = interval_day_time(str3),
+  interval_day_time(str3) <= interval_day_time(str3),
+  interval_day_time(str3) <= interval_day_time(str4),
+  interval_day_time(str3) < interval_day_time(str4),
+  interval_day_time(str3) >= interval_day_time(str3),
+  interval_day_time(str4) >= interval_day_time(str3),
+  interval_day_time(str4) > interval_day_time(str3),
+  interval_day_time(str3) != interval_day_time(str4),
+
+  interval_day_time(str3) = interval '1 2:3:4' day to second,
+  interval_day_time(str3) <= interval '1 2:3:4' day to second,
+  interval_day_time(str3) <= interval '1 2:3:5' day to second,
+  interval_day_time(str3) < interval '1 2:3:5' day to second,
+  interval_day_time(str3) >= interval '1 2:3:4' day to second,
+  interval_day_time(str4) >= interval '1 2:3:4' day to second,
+  interval_day_time(str4) > interval '1 2:3:4' day to second,
+  interval_day_time(str3) != interval '1 2:3:5' day to second,
+
+  interval '1 2:3:4' day to second = interval_day_time(str3),
+  interval '1 2:3:4' day to second <= interval_day_time(str3),
+  interval '1 2:3:4' day to second <= interval_day_time(str4),
+  interval '1 2:3:4' day to second < interval_day_time(str4),
+  interval '1 2:3:4' day to second >= interval_day_time(str3),
+  interval '1 2:3:5' day to second >= interval_day_time(str3),
+  interval '1 2:3:5' day to second > interval_day_time(str3),
+  interval '1 2:3:4' day to second != interval_day_time(str4)
+from vector_interval_2 order by str3;
+
+select
+  str3,
+  -- Should all be true
+  interval_day_time(str3) = interval_day_time(str3),
+  interval_day_time(str3) <= interval_day_time(str3),
+  interval_day_time(str3) <= interval_day_time(str4),
+  interval_day_time(str3) < interval_day_time(str4),
+  interval_day_time(str3) >= interval_day_time(str3),
+  interval_day_time(str4) >= interval_day_time(str3),
+  interval_day_time(str4) > interval_day_time(str3),
+  interval_day_time(str3) != interval_day_time(str4),
+
+  interval_day_time(str3) = interval '1 2:3:4' day to second,
+  interval_day_time(str3) <= interval '1 2:3:4' day to second,
+  interval_day_time(str3) <= interval '1 2:3:5' day to second,
+  interval_day_time(str3) < interval '1 2:3:5' day to second,
+  interval_day_time(str3) >= interval '1 2:3:4' day to second,
+  interval_day_time(str4) >= interval '1 2:3:4' day to second,
+  interval_day_time(str4) > interval '1 2:3:4' day to second,
+  interval_day_time(str3) != interval '1 2:3:5' day to second,
+
+  interval '1 2:3:4' day to second = interval_day_time(str3),
+  interval '1 2:3:4' day to second <= interval_day_time(str3),
+  interval '1 2:3:4' day to second <= interval_day_time(str4),
+  interval '1 2:3:4' day to second < interval_day_time(str4),
+  interval '1 2:3:4' day to second >= interval_day_time(str3),
+  interval '1 2:3:5' day to second >= interval_day_time(str3),
+  interval '1 2:3:5' day to second > interval_day_time(str3),
+  interval '1 2:3:4' day to second != interval_day_time(str4)
+from vector_interval_2 order by str3;
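
Day-time intervals order by total elapsed time. Here interval '1 2:3:4' is
86400 + 2*3600 + 3*60 + 4 = 93784 seconds, against 93785 seconds for interval '1 2:3:5',
which is what makes every strict comparison above come out true. A sketch:

    select
      interval '1 2:3:4' day to second < interval '1 2:3:5' day to second  -- expected: true
    from vector_interval_2;
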
+
+explain
+select
+  str3,
+  -- Should all be false
+  interval_day_time(str3) != interval_day_time(str3),
+  interval_day_time(str3) >= interval_day_time(str4),
+  interval_day_time(str3) > interval_day_time(str4),
+  interval_day_time(str4) <= interval_day_time(str3),
+  interval_day_time(str4) < interval_day_time(str3),
+  interval_day_time(str3) != interval_day_time(str3),
+
+  interval_day_time(str3) != interval '1 2:3:4' day to second,
+  interval_day_time(str3) >= interval '1 2:3:5' day to second,
+  interval_day_time(str3) > interval '1 2:3:5' day to second,
+  interval_day_time(str4) <= interval '1 2:3:4' day to second,
+  interval_day_time(str4) < interval '1 2:3:4' day to second,
+  interval_day_time(str3) != interval '1 2:3:4' day to second,
+
+  interval '1 2:3:4' day to second != interval_day_time(str3),
+  interval '1 2:3:4' day to second >= interval_day_time(str4),
+  interval '1 2:3:4' day to second > interval_day_time(str4),
+  interval '1 2:3:5' day to second <= interval_day_time(str3),
+  interval '1 2:3:5' day to second < interval_day_time(str3),
+  interval '1 2:3:4' day to second != interval_day_time(str3)
+from vector_interval_2 order by str3;
+
+select
+  str3,
+  -- Should all be false
+  interval_day_time(str3) != interval_day_time(str3),
+  interval_day_time(str3) >= interval_day_time(str4),
+  interval_day_time(str3) > interval_day_time(str4),
+  interval_day_time(str4) <= interval_day_time(str3),
+  interval_day_time(str4) < interval_day_time(str3),
+  interval_day_time(str3) != interval_day_time(str3),
+
+  interval_day_time(str3) != interval '1 2:3:4' day to second,
+  interval_day_time(str3) >= interval '1 2:3:5' day to second,
+  interval_day_time(str3) > interval '1 2:3:5' day to second,
+  interval_day_time(str4) <= interval '1 2:3:4' day to second,
+  interval_day_time(str4) < interval '1 2:3:4' day to second,
+  interval_day_time(str3) != interval '1 2:3:4' day to second,
+
+  interval '1 2:3:4' day to second != interval_day_time(str3),
+  interval '1 2:3:4' day to second >= interval_day_time(str4),
+  interval '1 2:3:4' day to second > interval_day_time(str4),
+  interval '1 2:3:5' day to second <= interval_day_time(str3),
+  interval '1 2:3:5' day to second < interval_day_time(str3),
+  interval '1 2:3:4' day to second != interval_day_time(str3)
+from vector_interval_2 order by str3;
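
One caveat to the "Should all be true/false" comments: the second row of vector_interval_2 is all
NULLs, so every comparison above yields NULL on that row rather than true or false. The predicate
tests that follow lean on the same behavior, since a NULL comparison in a where clause simply
drops the row. For instance:

    select str1, interval_year_month(str1) = interval_year_month(str1)
    from vector_interval_2;
    -- expected: true on the populated row, NULL on the all-null row
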
+
+
+-- interval expressions in predicates
+explain
+select ts from vector_interval_2
+where
+  interval_year_month(str1) = interval_year_month(str1)
+  and interval_year_month(str1) != interval_year_month(str2)
+  and interval_year_month(str1) <= interval_year_month(str2)
+  and interval_year_month(str1) < interval_year_month(str2)
+  and interval_year_month(str2) >= interval_year_month(str1)
+  and interval_year_month(str2) > interval_year_month(str1)
+
+  and interval_year_month(str1) = interval '1-2' year to month
+  and interval_year_month(str1) != interval '1-3' year to month
+  and interval_year_month(str1) <= interval '1-3' year to month
+  and interval_year_month(str1) < interval '1-3' year to month
+  and interval_year_month(str2) >= interval '1-2' year to month
+  and interval_year_month(str2) > interval '1-2' year to month
+
+  and interval '1-2' year to month = interval_year_month(str1)
+  and interval '1-2' year to month != interval_year_month(str2)
+  and interval '1-2' year to month <= interval_year_month(str2)
+  and interval '1-2' year to month < interval_year_month(str2)
+  and interval '1-3' year to month >= interval_year_month(str1)
+  and interval '1-3' year to month > interval_year_month(str1)
+order by ts;
+
+select ts from vector_interval_2
+where
+  interval_year_month(str1) = interval_year_month(str1)
+  and interval_year_month(str1) != interval_year_month(str2)
+  and interval_year_month(str1) <= interval_year_month(str2)
+  and interval_year_month(str1) < interval_year_month(str2)
+  and interval_year_month(str2) >= interval_year_month(str1)
+  and interval_year_month(str2) > interval_year_month(str1)
+
+  and interval_year_month(str1) = interval '1-2' year to month
+  and interval_year_month(str1) != interval '1-3' year to month
+  and interval_year_month(str1) <= interval '1-3' year to month
+  and interval_year_month(str1) < interval '1-3' year to month
+  and interval_year_month(str2) >= interval '1-2' year to month
+  and interval_year_month(str2) > interval '1-2' year to month
+
+  and interval '1-2' year to month = interval_year_month(str1)
+  and interval '1-2' year to month != interval_year_month(str2)
+  and interval '1-2' year to month <= interval_year_month(str2)
+  and interval '1-2' year to month < interval_year_month(str2)
+  and interval '1-3' year to month >= interval_year_month(str1)
+  and interval '1-3' year to month > interval_year_month(str1)
+order by ts;
+
+explain
+select ts from vector_interval_2
+where
+  interval_day_time(str3) = interval_day_time(str3)
+  and interval_day_time(str3) != interval_day_time(str4)
+  and interval_day_time(str3) <= interval_day_time(str4)
+  and interval_day_time(str3) < interval_day_time(str4)
+  and interval_day_time(str4) >= interval_day_time(str3)
+  and interval_day_time(str4) > interval_day_time(str3)
+
+  and interval_day_time(str3) = interval '1 2:3:4' day to second
+  and interval_day_time(str3) != interval '1 2:3:5' day to second
+  and interval_day_time(str3) <= interval '1 2:3:5' day to second
+  and interval_day_time(str3) < interval '1 2:3:5' day to second
+  and interval_day_time(str4) >= interval '1 2:3:4' day to second
+  and interval_day_time(str4) > interval '1 2:3:4' day to second
+
+  and interval '1 2:3:4' day to second = interval_day_time(str3)
+  and interval '1 2:3:4' day to second != interval_day_time(str4)
+  and interval '1 2:3:4' day to second <= interval_day_time(str4)
+  and interval '1 2:3:4' day to second < interval_day_time(str4)
+  and interval '1 2:3:5' day to second >= interval_day_time(str3)
+  and interval '1 2:3:5' day to second > interval_day_time(str3)
+order by ts;
+
+select ts from vector_interval_2
+where
+  interval_day_time(str3) = interval_day_time(str3)
+  and interval_day_time(str3) != interval_day_time(str4)
+  and interval_day_time(str3) <= interval_day_time(str4)
+  and interval_day_time(str3) < interval_day_time(str4)
+  and interval_day_time(str4) >= interval_day_time(str3)
+  and interval_day_time(str4) > interval_day_time(str3)
+
+  and interval_day_time(str3) = interval '1 2:3:4' day to second
+  and interval_day_time(str3) != interval '1 2:3:5' day to second
+  and interval_day_time(str3) <= interval '1 2:3:5' day to second
+  and interval_day_time(str3) < interval '1 2:3:5' day to second
+  and interval_day_time(str4) >= interval '1 2:3:4' day to second
+  and interval_day_time(str4) > interval '1 2:3:4' day to second
+
+  and interval '1 2:3:4' day to second = interval_day_time(str3)
+  and interval '1 2:3:4' day to second != interval_day_time(str4)
+  and interval '1 2:3:4' day to second <= interval_day_time(str4)
+  and interval '1 2:3:4' day to second < interval_day_time(str4)
+  and interval '1 2:3:5' day to second >= interval_day_time(str3)
+  and interval '1 2:3:5' day to second > interval_day_time(str3)
+order by ts;
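
The literals in the next block are derived from the seeded row: dt is 2001-01-01 and str1 is
'1-2', so dt + interval_year_month(str1) is 2001-01-01 plus 1 year 2 months, which is
2002-03-01. A sketch of that arithmetic on its own:

    select dt + interval '1-2' year to month from vector_interval_2;
    -- expected: 2002-03-01 on the populated row
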
+
+explain
+select ts from vector_interval_2
+where
+  date '2002-03-01' = dt + interval_year_month(str1)
+  and date '2002-03-01' <= dt + interval_year_month(str1)
+  and date '2002-03-01' >= dt + interval_year_month(str1)
+  and dt + interval_year_month(str1) = date '2002-03-01'
+  and dt + interval_year_month(str1) <= date '2002-03-01'
+  and dt + interval_year_month(str1) >= date '2002-03-01'
+  and dt != dt + interval_year_month(str1)
+
+  and date '2002-03-01' = dt + interval '1-2' year to month
+  and date '2002-03-01' <= dt + interval '1-2' year to month
+  and date '2002-03-01' >= dt + interval '1-2' year to month
+  and dt + interval '1-2' year to month = date '2002-03-01'
+  and dt + interval '1-2' year to month <= date '2002-03-01'
+  and dt + interval '1-2' year to month >= date '2002-03-01'
+  and dt != dt + interval '1-2' year to month
+order by ts;
+
+select ts from vector_interval_2
+where
+  date '2002-03-01' = dt + interval_year_month(str1)
+  and date '2002-03-01' <= dt + interval_year_month(str1)
+  and date '2002-03-01' >= dt + interval_year_month(str1)
+  and dt + interval_year_month(str1) = date '2002-03-01'
+  and dt + interval_year_month(str1) <= date '2002-03-01'
+  and dt + interval_year_month(str1) >= date '2002-03-01'
+  and dt != dt + interval_year_month(str1)
+
+  and date '2002-03-01' = dt + interval '1-2' year to month
+  and date '2002-03-01' <= dt + interval '1-2' year to month
+  and date '2002-03-01' >= dt + interval '1-2' year to month
+  and dt + interval '1-2' year to month = date '2002-03-01'
+  and dt + interval '1-2' year to month <= date '2002-03-01'
+  and dt + interval '1-2' year to month >= date '2002-03-01'
+  and dt != dt + interval '1-2' year to month
+order by ts;
+
+explain
+select ts from vector_interval_2
+where
+  timestamp '2002-03-01 01:02:03' = ts + interval '1-2' year to month
+  and timestamp '2002-03-01 01:02:03' <= ts + interval '1-2' year to month
+  and timestamp '2002-03-01 01:02:03' >= ts + interval '1-2' year to month
+  and timestamp '2002-04-01 01:02:03' != ts + interval '1-2' year to month
+  and timestamp '2002-02-01 01:02:03' < ts + interval '1-2' year to month
+  and timestamp '2002-04-01 01:02:03' > ts + interval '1-2' year to month
+
+  and ts + interval '1-2' year to month = timestamp '2002-03-01 01:02:03'
+  and ts + interval '1-2' year to month >= timestamp '2002-03-01 01:02:03'
+  and ts + interval '1-2' year to month <= timestamp '2002-03-01 01:02:03'
+  and ts + interval '1-2' year to month != timestamp '2002-04-01 01:02:03'
+  and ts + interval '1-2' year to month > timestamp '2002-02-01 01:02:03'
+  and ts + interval '1-2' year to month < timestamp '2002-04-01 01:02:03'
+
+  and ts = ts + interval '0' year
+  and ts != ts + interval '1' year
+  and ts <= ts + interval '1' year
+  and ts < ts + interval '1' year
+  and ts >= ts - interval '1' year
+  and ts > ts - interval '1' year
+order by ts;
+
+select ts from vector_interval_2
+where
+  timestamp '2002-03-01 01:02:03' = ts + interval '1-2' year to month
+  and timestamp '2002-03-01 01:02:03' <= ts + interval '1-2' year to month
+  and timestamp '2002-03-01 01:02:03' >= ts + interval '1-2' year to month
+  and timestamp '2002-04-01 01:02:03' != ts + interval '1-2' year to month
+  and timestamp '2002-02-01 01:02:03' < ts + interval '1-2' year to month
+  and timestamp '2002-04-01 01:02:03' > ts + interval '1-2' year to month
+
+  and ts + interval '1-2' year to month = timestamp '2002-03-01 01:02:03'
+  and ts + interval '1-2' year to month >= timestamp '2002-03-01 01:02:03'
+  and ts + interval '1-2' year to month <= timestamp '2002-03-01 01:02:03'
+  and ts + interval '1-2' year to month != timestamp '2002-04-01 01:02:03'
+  and ts + interval '1-2' year to month > timestamp '2002-02-01 01:02:03'
+  and ts + interval '1-2' year to month < timestamp '2002-04-01 01:02:03'
+
+  and ts = ts + interval '0' year
+  and ts != ts + interval '1' year
+  and ts <= ts + interval '1' year
+  and ts < ts + interval '1' year
+  and ts >= ts - interval '1' year
+  and ts > ts - interval '1' year
+order by ts;
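
In the block above, ts + interval '1-2' year to month moves the seeded timestamp
2001-01-01 01:02:03 forward to 2002-03-01 01:02:03, which is why that literal anchors most of
the predicates; the interval '0' year terms are identity checks that hold only on non-null rows.
A sketch:

    select
      ts + interval '1-2' year to month,  -- expected: 2002-03-01 01:02:03 on the populated row
      ts = ts + interval '0' year         -- expected: true (NULL on the all-null row)
    from vector_interval_2;
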
+
+-- day to second expressions in predicate
+explain
+select ts from vector_interval_2
+where
+  timestamp '2001-01-01 01:02:03' = dt + interval '0 1:2:3' day to second
+  and timestamp '2001-01-01 01:02:03' != dt + interval '0 1:2:4' day to second
+  and timestamp '2001-01-01 01:02:03' <= dt + interval '0 1:2:3' day to second
+  and timestamp '2001-01-01 01:02:03' < dt + interval '0 1:2:4' day to second
+  and timestamp '2001-01-01 01:02:03' >= dt - interval '0 1:2:3' day to second
+  and timestamp '2001-01-01 01:02:03' > dt - interval '0 1:2:4' day to second
+
+  and dt + interval '0 1:2:3' day to second = timestamp '2001-01-01 01:02:03'
+  and dt + interval '0 1:2:4' day to second != timestamp '2001-01-01 01:02:03'
+  and dt + interval '0 1:2:3' day to second >= timestamp '2001-01-01 01:02:03'
+  and dt + interval '0 1:2:4' day to second > timestamp '2001-01-01 01:02:03'
+  and dt - interval '0 1:2:3' day to second <= timestamp '2001-01-01 01:02:03'
+  and dt - interval '0 1:2:4' day to second < timestamp '2001-01-01 01:02:03'
+
+  and ts = dt + interval '0 1:2:3' day to second
+  and ts != dt + interval '0 1:2:4' day to second
+  and ts <= dt + interval '0 1:2:3' day to second
+  and ts < dt + interval '0 1:2:4' day to second
+  and ts >= dt - interval '0 1:2:3' day to second
+  and ts > dt - interval '0 1:2:4' day to second
+order by ts;
+
+select ts from vector_interval_2
+where
+  timestamp '2001-01-01 01:02:03' = dt + interval '0 1:2:3' day to second
+  and timestamp '2001-01-01 01:02:03' != dt + interval '0 1:2:4' day to second
+  and timestamp '2001-01-01 01:02:03' <= dt + interval '0 1:2:3' day to second
+  and timestamp '2001-01-01 01:02:03' < dt + interval '0 1:2:4' day to second
+  and timestamp '2001-01-01 01:02:03' >= dt - interval '0 1:2:3' day to second
+  and timestamp '2001-01-01 01:02:03' > dt - interval '0 1:2:4' day to second
+
+  and dt + interval '0 1:2:3' day to second = timestamp '2001-01-01 01:02:03'
+  and dt + interval '0 1:2:4' day to second != timestamp '2001-01-01 01:02:03'
+  and dt + interval '0 1:2:3' day to second >= timestamp '2001-01-01 01:02:03'
+  and dt + interval '0 1:2:4' day to second > timestamp '2001-01-01 01:02:03'
+  and dt - interval '0 1:2:3' day to second <= timestamp '2001-01-01 01:02:03'
+  and dt - interval '0 1:2:4' day to second < timestamp '2001-01-01 01:02:03'
+
+  and ts = dt + interval '0 1:2:3' day to second
+  and ts != dt + interval '0 1:2:4' day to second
+  and ts <= dt + interval '0 1:2:3' day to second
+  and ts < dt + interval '0 1:2:4' day to second
+  and ts >= dt - interval '0 1:2:3' day to second
+  and ts > dt - interval '0 1:2:4' day to second
+order by ts;
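
The block above relies on the date operand being promoted to a timestamp at midnight, so
dt + interval '0 1:2:3' day to second is 2001-01-01 00:00:00 plus 1:02:03, which is exactly the
seeded ts. A sketch:

    select dt + interval '0 1:2:3' day to second from vector_interval_2;
    -- expected: 2001-01-01 01:02:03 on the populated row, equal to ts
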
+
+explain
+select ts from vector_interval_2
+where
+  timestamp '2001-01-01 01:02:03' = ts + interval '0' day
+  and timestamp '2001-01-01 01:02:03' != ts + interval '1' day
+  and timestamp '2001-01-01 01:02:03' <= ts + interval '1' day
+  and timestamp '2001-01-01 01:02:03' < ts + interval '1' day
+  and timestamp '2001-01-01 01:02:03' >= ts - interval '1' day
+  and timestamp '2001-01-01 01:02:03' > ts - interval '1' day
+
+  and ts + interval '0' day = timestamp '2001-01-01 01:02:03'
+  and ts + interval '1' day != timestamp '2001-01-01 01:02:03'
+  and ts + interval '1' day >= timestamp '2001-01-01 01:02:03'
+  and ts + interval '1' day > timestamp '2001-01-01 01:02:03'
+  and ts - interval '1' day <= timestamp '2001-01-01 01:02:03'
+  and ts - interval '1' day < timestamp '2001-01-01 01:02:03'
+
+  and ts = ts + interval '0' day
+  and ts != ts + interval '1' day
+  and ts <= ts + interval '1' day
+  and ts < ts + interval '1' day
+  and ts >= ts - interval '1' day
+  and ts > ts - interval '1' day
+order by ts;
+
+select ts from vector_interval_2
+where
+  timestamp '2001-01-01 01:02:03' = ts + interval '0' day
+  and timestamp '2001-01-01 01:02:03' != ts + interval '1' day
+  and timestamp '2001-01-01 01:02:03' <= ts + interval '1' day
+  and timestamp '2001-01-01 01:02:03' < ts + interval '1' day
+  and timestamp '2001-01-01 01:02:03' >= ts - interval '1' day
+  and timestamp '2001-01-01 01:02:03' > ts - interval '1' day
+
+  and ts + interval '0' day = timestamp '2001-01-01 01:02:03'
+  and ts + interval '1' day != timestamp '2001-01-01 01:02:03'
+  and ts + interval '1' day >= timestamp '2001-01-01 01:02:03'
+  and ts + interval '1' day > timestamp '2001-01-01 01:02:03'
+  and ts - interval '1' day <= timestamp '2001-01-01 01:02:03'
+  and ts - interval '1' day < timestamp '2001-01-01 01:02:03'
+
+  and ts = ts + interval '0' day
+  and ts != ts + interval '1' day
+  and ts <= ts + interval '1' day
+  and ts < ts + interval '1' day
+  and ts >= ts - interval '1' day
+  and ts > ts - interval '1' day
+order by ts;
+
+drop table vector_interval_2;
diff --git a/ql/src/test/results/clientpositive/vector_date_1.q.out b/ql/src/test/results/clientpositive/vector_date_1.q.out
new file mode 100644
index 0000000..5ed7424
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_date_1.q.out
@@ -0,0 +1,677 @@
+PREHOOK: query: drop table if exists vector_date_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists vector_date_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table vector_date_1 (dt1 date, dt2 date) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@vector_date_1
+POSTHOOK: query: create table vector_date_1 (dt1 date, dt2 date) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@vector_date_1
+PREHOOK: query: insert into table vector_date_1
+  select null, null from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@vector_date_1
+POSTHOOK: query: insert into table vector_date_1
+  select null, null from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@vector_date_1
+POSTHOOK: Lineage: vector_date_1.dt1 EXPRESSION []
+POSTHOOK: Lineage: vector_date_1.dt2 EXPRESSION []
+PREHOOK: query: insert into table vector_date_1
+  select date '1999-12-31', date '2000-01-01' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@vector_date_1
+POSTHOOK: query: insert into table vector_date_1
+  select date '1999-12-31', date '2000-01-01' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@vector_date_1
+POSTHOOK: Lineage: vector_date_1.dt1 SIMPLE []
+POSTHOOK: Lineage: vector_date_1.dt2 SIMPLE []
+PREHOOK: query: insert into table vector_date_1
+  select date '2001-01-01', date '2001-06-01' from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@vector_date_1
+POSTHOOK: query: insert into table vector_date_1
+  select date '2001-01-01', date '2001-06-01' from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@vector_date_1
+POSTHOOK: Lineage: vector_date_1.dt1 SIMPLE []
+POSTHOOK: Lineage: vector_date_1.dt2 SIMPLE []
+PREHOOK: query: -- column-to-column comparison in select clause
+explain
+select
+  dt1, dt2,
+  -- should be all true
+  dt1 = dt1,
+  dt1 != dt2,
+  dt1 <= dt1,
+  dt1 <= dt2,
+  dt1 < dt2,
+  dt2 >= dt2,
+  dt2 >= dt1,
+  dt2 > dt1
+from vector_date_1 order by dt1
+PREHOOK: type: QUERY
+POSTHOOK: query: -- column-to-column comparison in select clause
+explain
+select
+  dt1, dt2,
+  -- should be all true
+  dt1 = dt1,
+  dt1 != dt2,
+  dt1 <= dt1,
+  dt1 <= dt2,
+  dt1 < dt2,
+  dt2 >= dt2,
+  dt2 >= dt1,
+  dt2 > dt1
+from vector_date_1 order by dt1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
alias: vector_date_1 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt1 (type: date), dt2 (type: date), (dt1 = dt1) (type: boolean), (dt1 <> dt2) (type: boolean), (dt1 <= dt1) (type: boolean), (dt1 <= dt2) (type: boolean), (dt1 < dt2) (type: boolean), (dt2 >= dt2) (type: boolean), (dt2 >= dt1) (type: boolean), (dt2 > dt1) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt1, dt2, + -- should be all true + dt1 = dt1, + dt1 != dt2, + dt1 <= dt1, + dt1 <= dt2, + dt1 < dt2, + dt2 >= dt2, + dt2 >= dt1, + dt2 > dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt1, dt2, + -- should be all true + dt1 = dt1, + dt1 != dt2, + dt1 <= dt1, + dt1 <= dt2, + dt1 < dt2, + dt2 >= dt2, + dt2 >= dt1, + dt2 > dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +1999-12-31 2000-01-01 true true true true true true true true +2001-01-01 2001-06-01 true true true true true true true true +PREHOOK: query: explain +select + dt1, dt2, + -- should be all false + dt1 != dt1, + dt1 = dt2, + dt1 < dt1, + dt1 >= dt2, + dt1 > dt2, + dt2 > dt2, + dt2 <= dt1, + dt2 < dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + dt1, dt2, + -- should be all false + dt1 != dt1, + dt1 = dt2, + dt1 < dt1, + dt1 >= dt2, + dt1 > dt2, + dt2 > dt2, + dt2 <= dt1, + dt2 < dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_date_1 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt1 (type: date), dt2 (type: date), (dt1 <> dt1) (type: boolean), (dt1 = dt2) (type: 
boolean), (dt1 < dt1) (type: boolean), (dt1 >= dt2) (type: boolean), (dt1 > dt2) (type: boolean), (dt2 > dt2) (type: boolean), (dt2 <= dt1) (type: boolean), (dt2 < dt1) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt1, dt2, + -- should be all false + dt1 != dt1, + dt1 = dt2, + dt1 < dt1, + dt1 >= dt2, + dt1 > dt2, + dt2 > dt2, + dt2 <= dt1, + dt2 < dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt1, dt2, + -- should be all false + dt1 != dt1, + dt1 = dt2, + dt1 < dt1, + dt1 >= dt2, + dt1 > dt2, + dt2 > dt2, + dt2 <= dt1, + dt2 < dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +1999-12-31 2000-01-01 false false false false false false false false +2001-01-01 2001-06-01 false false false false false false false false +PREHOOK: query: -- column-to-literal/literal-to-column comparison in select clause +explain +select + dt1, + -- should be all true + dt1 != date '1970-01-01', + dt1 >= date '1970-01-01', + dt1 > date '1970-01-01', + dt1 <= date '2100-01-01', + dt1 < date '2100-01-01', + date '1970-01-01' != dt1, + date '1970-01-01' <= dt1, + date '1970-01-01' < dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +POSTHOOK: query: -- column-to-literal/literal-to-column comparison in select clause +explain +select + dt1, + -- should be all true + dt1 != date '1970-01-01', + dt1 >= date '1970-01-01', + dt1 > date '1970-01-01', + dt1 <= date '2100-01-01', + dt1 < date '2100-01-01', + date '1970-01-01' != dt1, + date '1970-01-01' <= dt1, + date '1970-01-01' < dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_date_1 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE 
Column stats: NONE + Select Operator + expressions: dt1 (type: date), (dt1 <> 1970-01-01) (type: boolean), (dt1 >= 1970-01-01) (type: boolean), (dt1 > 1970-01-01) (type: boolean), (dt1 <= 2100-01-01) (type: boolean), (dt1 < 2100-01-01) (type: boolean), (1970-01-01 <> dt1) (type: boolean), (1970-01-01 <= dt1) (type: boolean), (1970-01-01 < dt1) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt1, + -- should be all true + dt1 != date '1970-01-01', + dt1 >= date '1970-01-01', + dt1 > date '1970-01-01', + dt1 <= date '2100-01-01', + dt1 < date '2100-01-01', + date '1970-01-01' != dt1, + date '1970-01-01' <= dt1, + date '1970-01-01' < dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt1, + -- should be all true + dt1 != date '1970-01-01', + dt1 >= date '1970-01-01', + dt1 > date '1970-01-01', + dt1 <= date '2100-01-01', + dt1 < date '2100-01-01', + date '1970-01-01' != dt1, + date '1970-01-01' <= dt1, + date '1970-01-01' < dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL +1999-12-31 true true true true true true true true +2001-01-01 true true true true true true true true +PREHOOK: query: explain +select + dt1, + -- should all be false + dt1 = date '1970-01-01', + dt1 <= date '1970-01-01', + dt1 < date '1970-01-01', + dt1 >= date '2100-01-01', + dt1 > date '2100-01-01', + date '1970-01-01' = dt1, + date '1970-01-01' >= dt1, + date '1970-01-01' > dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + dt1, + -- should all be false + dt1 = date '1970-01-01', + dt1 <= date '1970-01-01', + dt1 < date '1970-01-01', + dt1 >= date '2100-01-01', + dt1 > date '2100-01-01', + date '1970-01-01' = dt1, + date '1970-01-01' >= dt1, + date '1970-01-01' > dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE 
PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_date_1 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt1 (type: date), (dt1 = 1970-01-01) (type: boolean), (dt1 <= 1970-01-01) (type: boolean), (dt1 < 1970-01-01) (type: boolean), (dt1 >= 2100-01-01) (type: boolean), (dt1 > 2100-01-01) (type: boolean), (1970-01-01 = dt1) (type: boolean), (1970-01-01 >= dt1) (type: boolean), (1970-01-01 > dt1) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt1, + -- should all be false + dt1 = date '1970-01-01', + dt1 <= date '1970-01-01', + dt1 < date '1970-01-01', + dt1 >= date '2100-01-01', + dt1 > date '2100-01-01', + date '1970-01-01' = dt1, + date '1970-01-01' >= dt1, + date '1970-01-01' > dt1 +from vector_date_1 order by dt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt1, + -- should all be false + dt1 = date '1970-01-01', + dt1 <= date '1970-01-01', + dt1 < date '1970-01-01', + dt1 >= date '2100-01-01', + dt1 > date '2100-01-01', + date '1970-01-01' = dt1, + date '1970-01-01' >= dt1, + date '1970-01-01' > dt1 +from vector_date_1 order by dt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL +1999-12-31 false false false false false false false false +2001-01-01 false false false false false false false false +PREHOOK: query: -- column-to-column comparisons in predicate +-- all rows with non-null dt1 should be returned +explain +select + dt1, dt2 +from vector_date_1 +where + dt1 = dt1 + and dt1 != dt2 + and dt1 < dt2 + and dt1 <= dt2 + and dt2 > dt1 + and dt2 >= dt1 +order by dt1 +PREHOOK: type: QUERY +POSTHOOK: query: -- column-to-column comparisons in predicate +-- all rows with non-null dt1 should be returned +explain +select + dt1, dt2 +from vector_date_1 +where + dt1 = dt1 + and dt1 != dt2 + and dt1 < dt2 + and dt1 <= dt2 + and dt2 > dt1 + and dt2 >= dt1 +order by dt1 +POSTHOOK: type: QUERY +STAGE 
DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_date_1 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((dt1 = dt1) and (dt1 <> dt2)) and (dt1 < dt2)) and (dt1 <= dt2)) and (dt2 > dt1)) and (dt2 >= dt1)) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: dt1 (type: date), dt2 (type: date) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt1, dt2 +from vector_date_1 +where + dt1 = dt1 + and dt1 != dt2 + and dt1 < dt2 + and dt1 <= dt2 + and dt2 > dt1 + and dt2 >= dt1 +order by dt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt1, dt2 +from vector_date_1 +where + dt1 = dt1 + and dt1 != dt2 + and dt1 < dt2 + and dt1 <= dt2 + and dt2 > dt1 + and dt2 >= dt1 +order by dt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +1999-12-31 2000-01-01 +2001-01-01 2001-06-01 +PREHOOK: query: -- column-to-literal/literal-to-column comparison in predicate +-- only a single row should be returned +explain +select + dt1, dt2 +from vector_date_1 +where + dt1 = date '2001-01-01' + and date '2001-01-01' = dt1 + and dt1 != date '1970-01-01' + and date '1970-01-01' != dt1 + and dt1 > date '1970-01-01' + and dt1 >= date '1970-01-01' + and date '1970-01-01' < dt1 + and date '1970-01-01' <= dt1 +order by dt1 +PREHOOK: type: QUERY +POSTHOOK: query: -- column-to-literal/literal-to-column comparison in predicate +-- only a single row should be returned +explain +select + dt1, dt2 +from vector_date_1 +where + dt1 = date '2001-01-01' + and date '2001-01-01' = dt1 + and dt1 != date '1970-01-01' + and date '1970-01-01' != dt1 + and dt1 > date '1970-01-01' + and dt1 >= date '1970-01-01' + and date '1970-01-01' < dt1 + and date '1970-01-01' <= dt1 +order by dt1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_date_1 + Statistics: Num rows: 3 Data size: 224 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((dt1 = 2001-01-01) and (2001-01-01 = dt1)) and (dt1 <> 1970-01-01)) and (1970-01-01 <> dt1)) and (dt1 > 1970-01-01)) and (dt1 >= 1970-01-01)) and (1970-01-01 < dt1)) and (1970-01-01 <= dt1)) (type: boolean) + Statistics: Num rows: 0 Data 
size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: dt2 (type: date) + outputColumnNames: _col1 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: 2001-01-01 (type: date) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + value expressions: _col1 (type: date) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: 2001-01-01 (type: date), VALUE._col0 (type: date) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt1, dt2 +from vector_date_1 +where + dt1 = date '2001-01-01' + and date '2001-01-01' = dt1 + and dt1 != date '1970-01-01' + and date '1970-01-01' != dt1 + and dt1 > date '1970-01-01' + and dt1 >= date '1970-01-01' + and date '1970-01-01' < dt1 + and date '1970-01-01' <= dt1 +order by dt1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt1, dt2 +from vector_date_1 +where + dt1 = date '2001-01-01' + and date '2001-01-01' = dt1 + and dt1 != date '1970-01-01' + and date '1970-01-01' != dt1 + and dt1 > date '1970-01-01' + and dt1 >= date '1970-01-01' + and date '1970-01-01' < dt1 + and date '1970-01-01' <= dt1 +order by dt1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_date_1 +#### A masked pattern was here #### +2001-01-01 2001-06-01 +PREHOOK: query: drop table vector_date_1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@vector_date_1 +PREHOOK: Output: default@vector_date_1 +POSTHOOK: query: drop table vector_date_1 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@vector_date_1 +POSTHOOK: Output: default@vector_date_1 diff --git a/ql/src/test/results/clientpositive/vector_interval_1.q.out b/ql/src/test/results/clientpositive/vector_interval_1.q.out new file mode 100644 index 0000000..5015916 --- /dev/null +++ b/ql/src/test/results/clientpositive/vector_interval_1.q.out @@ -0,0 +1,766 @@ +PREHOOK: query: drop table if exists vector_interval_1 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists vector_interval_1 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table vector_interval_1 (ts timestamp, dt date, str1 string, str2 string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_interval_1 +POSTHOOK: query: create table vector_interval_1 (ts timestamp, dt date, str1 string, str2 string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_interval_1 +PREHOOK: query: insert into vector_interval_1 + select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1 2:3:4' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@vector_interval_1 +POSTHOOK: query: insert into vector_interval_1 + select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1 2:3:4' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: 
default@vector_interval_1 +POSTHOOK: Lineage: vector_interval_1.dt SIMPLE [] +POSTHOOK: Lineage: vector_interval_1.str1 SIMPLE [] +POSTHOOK: Lineage: vector_interval_1.str2 SIMPLE [] +POSTHOOK: Lineage: vector_interval_1.ts SIMPLE [] +PREHOOK: query: insert into vector_interval_1 + select null, null, null, null from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@vector_interval_1 +POSTHOOK: query: insert into vector_interval_1 + select null, null, null, null from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@vector_interval_1 +POSTHOOK: Lineage: vector_interval_1.dt EXPRESSION [] +POSTHOOK: Lineage: vector_interval_1.str1 EXPRESSION [] +POSTHOOK: Lineage: vector_interval_1.str2 EXPRESSION [] +POSTHOOK: Lineage: vector_interval_1.ts EXPRESSION [] +PREHOOK: query: -- constants/cast from string +explain +select + str1, + interval '1-2' year to month, interval_year_month(str1), + interval '1 2:3:4' day to second, interval_day_time(str2) +from vector_interval_1 order by str1 +PREHOOK: type: QUERY +POSTHOOK: query: -- constants/cast from string +explain +select + str1, + interval '1-2' year to month, interval_year_month(str1), + interval '1 2:3:4' day to second, interval_day_time(str2) +from vector_interval_1 order by str1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: str1 (type: string), 1-2 (type: interval_year_month), CAST( str1 AS INTERVAL YEAR TO MONTH) (type: interval_year_month), 1 02:03:04.000000000 (type: interval_day_time), CAST( str2 AS INTERVAL DAY TO SECOND) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: interval_year_month), _col2 (type: interval_year_month), _col3 (type: interval_day_time), _col4 (type: interval_day_time) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: interval_year_month), VALUE._col1 (type: interval_year_month), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + str1, + interval '1-2' year to month, interval_year_month(str1), + interval '1 2:3:4' day to second, interval_day_time(str2) +from vector_interval_1 order by str1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + str1, + interval '1-2' year to month, interval_year_month(str1), + 
interval '1 2:3:4' day to second, interval_day_time(str2) +from vector_interval_1 order by str1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL 1-2 NULL 1 02:03:04.000000000 NULL +1-2 1-2 1-2 1 02:03:04.000000000 1 02:03:04.000000000 +PREHOOK: query: -- interval arithmetic +explain +select + dt, + interval '1-2' year to month + interval '1-2' year to month, + interval_year_month(str1) + interval_year_month(str1), + interval '1-2' year to month + interval_year_month(str1), + interval '1-2' year to month - interval '1-2' year to month, + interval_year_month(str1) - interval_year_month(str1), + interval '1-2' year to month - interval_year_month(str1) +from vector_interval_1 order by dt +PREHOOK: type: QUERY +POSTHOOK: query: -- interval arithmetic +explain +select + dt, + interval '1-2' year to month + interval '1-2' year to month, + interval_year_month(str1) + interval_year_month(str1), + interval '1-2' year to month + interval_year_month(str1), + interval '1-2' year to month - interval '1-2' year to month, + interval_year_month(str1) - interval_year_month(str1), + interval '1-2' year to month - interval_year_month(str1) +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt (type: date), 2-4 (type: interval_year_month), (CAST( str1 AS INTERVAL YEAR TO MONTH) + CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: interval_year_month), (1-2 + CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: interval_year_month), 0-0 (type: interval_year_month), (CAST( str1 AS INTERVAL YEAR TO MONTH) - CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: interval_year_month), (1-2 - CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: interval_year_month) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: interval_year_month), _col2 (type: interval_year_month), _col3 (type: interval_year_month), _col4 (type: interval_year_month), _col5 (type: interval_year_month), _col6 (type: interval_year_month) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_year_month), VALUE._col1 (type: interval_year_month), VALUE._col2 (type: interval_year_month), VALUE._col3 (type: interval_year_month), VALUE._col4 (type: interval_year_month), VALUE._col5 (type: interval_year_month) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt, + interval '1-2' year to month + interval '1-2' 
year to month, + interval_year_month(str1) + interval_year_month(str1), + interval '1-2' year to month + interval_year_month(str1), + interval '1-2' year to month - interval '1-2' year to month, + interval_year_month(str1) - interval_year_month(str1), + interval '1-2' year to month - interval_year_month(str1) +from vector_interval_1 order by dt +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt, + interval '1-2' year to month + interval '1-2' year to month, + interval_year_month(str1) + interval_year_month(str1), + interval '1-2' year to month + interval_year_month(str1), + interval '1-2' year to month - interval '1-2' year to month, + interval_year_month(str1) - interval_year_month(str1), + interval '1-2' year to month - interval_year_month(str1) +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL 2-4 NULL NULL 0-0 NULL NULL +2001-01-01 2-4 2-4 2-4 0-0 0-0 0-0 +PREHOOK: query: explain +select + dt, + interval '1 2:3:4' day to second + interval '1 2:3:4' day to second, + interval_day_time(str2) + interval_day_time(str2), + interval '1 2:3:4' day to second + interval_day_time(str2), + interval '1 2:3:4' day to second - interval '1 2:3:4' day to second, + interval_day_time(str2) - interval_day_time(str2), + interval '1 2:3:4' day to second - interval_day_time(str2) +from vector_interval_1 order by dt +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + dt, + interval '1 2:3:4' day to second + interval '1 2:3:4' day to second, + interval_day_time(str2) + interval_day_time(str2), + interval '1 2:3:4' day to second + interval_day_time(str2), + interval '1 2:3:4' day to second - interval '1 2:3:4' day to second, + interval_day_time(str2) - interval_day_time(str2), + interval '1 2:3:4' day to second - interval_day_time(str2) +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt (type: date), 2 04:06:08.000000000 (type: interval_day_time), (CAST( str2 AS INTERVAL DAY TO SECOND) + CAST( str2 AS INTERVAL DAY TO SECOND)) (type: interval_day_time), (1 02:03:04.000000000 + CAST( str2 AS INTERVAL DAY TO SECOND)) (type: interval_day_time), 0 00:00:00.000000000 (type: interval_day_time), (CAST( str2 AS INTERVAL DAY TO SECOND) - CAST( str2 AS INTERVAL DAY TO SECOND)) (type: interval_day_time), (1 02:03:04.000000000 - CAST( str2 AS INTERVAL DAY TO SECOND)) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time), _col5 (type: interval_day_time), _col6 (type: interval_day_time) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: 
interval_day_time), VALUE._col3 (type: interval_day_time), VALUE._col4 (type: interval_day_time), VALUE._col5 (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt, + interval '1 2:3:4' day to second + interval '1 2:3:4' day to second, + interval_day_time(str2) + interval_day_time(str2), + interval '1 2:3:4' day to second + interval_day_time(str2), + interval '1 2:3:4' day to second - interval '1 2:3:4' day to second, + interval_day_time(str2) - interval_day_time(str2), + interval '1 2:3:4' day to second - interval_day_time(str2) +from vector_interval_1 order by dt +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt, + interval '1 2:3:4' day to second + interval '1 2:3:4' day to second, + interval_day_time(str2) + interval_day_time(str2), + interval '1 2:3:4' day to second + interval_day_time(str2), + interval '1 2:3:4' day to second - interval '1 2:3:4' day to second, + interval_day_time(str2) - interval_day_time(str2), + interval '1 2:3:4' day to second - interval_day_time(str2) +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL 2 04:06:08.000000000 NULL NULL 0 00:00:00.000000000 NULL NULL +2001-01-01 2 04:06:08.000000000 2 04:06:08.000000000 2 04:06:08.000000000 0 00:00:00.000000000 0 00:00:00.000000000 0 00:00:00.000000000 +PREHOOK: query: -- date-interval arithmetic +explain +select + dt, + dt + interval '1-2' year to month, + dt + interval_year_month(str1), + interval '1-2' year to month + dt, + interval_year_month(str1) + dt, + dt - interval '1-2' year to month, + dt - interval_year_month(str1), + dt + interval '1 2:3:4' day to second, + dt + interval_day_time(str2), + interval '1 2:3:4' day to second + dt, + interval_day_time(str2) + dt, + dt - interval '1 2:3:4' day to second, + dt - interval_day_time(str2) +from vector_interval_1 order by dt +PREHOOK: type: QUERY +POSTHOOK: query: -- date-interval arithmetic +explain +select + dt, + dt + interval '1-2' year to month, + dt + interval_year_month(str1), + interval '1-2' year to month + dt, + interval_year_month(str1) + dt, + dt - interval '1-2' year to month, + dt - interval_year_month(str1), + dt + interval '1 2:3:4' day to second, + dt + interval_day_time(str2), + interval '1 2:3:4' day to second + dt, + interval_day_time(str2) + dt, + dt - interval '1 2:3:4' day to second, + dt - interval_day_time(str2) +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt (type: date), (dt + 1-2) (type: date), (dt + CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: date), (1-2 + dt) (type: date), (CAST( str1 AS INTERVAL 
YEAR TO MONTH) + dt) (type: date), (dt - 1-2) (type: date), (dt - CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: date), (dt + 1 02:03:04.000000000) (type: timestamp), (dt + CAST( str2 AS INTERVAL DAY TO SECOND)) (type: timestamp), (1 02:03:04.000000000 + dt) (type: timestamp), (CAST( str2 AS INTERVAL DAY TO SECOND) + dt) (type: timestamp), (dt - 1 02:03:04.000000000) (type: timestamp), (dt - CAST( str2 AS INTERVAL DAY TO SECOND)) (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: date), _col2 (type: date), _col3 (type: date), _col4 (type: date), _col5 (type: date), _col6 (type: date), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: date), VALUE._col1 (type: date), VALUE._col2 (type: date), VALUE._col3 (type: date), VALUE._col4 (type: date), VALUE._col5 (type: date), VALUE._col6 (type: timestamp), VALUE._col7 (type: timestamp), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt, + dt + interval '1-2' year to month, + dt + interval_year_month(str1), + interval '1-2' year to month + dt, + interval_year_month(str1) + dt, + dt - interval '1-2' year to month, + dt - interval_year_month(str1), + dt + interval '1 2:3:4' day to second, + dt + interval_day_time(str2), + interval '1 2:3:4' day to second + dt, + interval_day_time(str2) + dt, + dt - interval '1 2:3:4' day to second, + dt - interval_day_time(str2) +from vector_interval_1 order by dt +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt, + dt + interval '1-2' year to month, + dt + interval_year_month(str1), + interval '1-2' year to month + dt, + interval_year_month(str1) + dt, + dt - interval '1-2' year to month, + dt - interval_year_month(str1), + dt + interval '1 2:3:4' day to second, + dt + interval_day_time(str2), + interval '1 2:3:4' day to second + dt, + interval_day_time(str2) + dt, + dt - interval '1 2:3:4' day to second, + dt - interval_day_time(str2) +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +2001-01-01 2002-03-01 2002-03-01 2002-03-01 2002-03-01 1999-11-01 1999-11-01 2001-01-02 02:03:04 2001-01-02 02:03:04 
2001-01-02 02:03:04 2001-01-02 02:03:04 2000-12-30 21:56:56 2000-12-30 21:56:56 +PREHOOK: query: -- timestamp-interval arithmetic +explain +select + ts, + ts + interval '1-2' year to month, + ts + interval_year_month(str1), + interval '1-2' year to month + ts, + interval_year_month(str1) + ts, + ts - interval '1-2' year to month, + ts - interval_year_month(str1), + ts + interval '1 2:3:4' day to second, + ts + interval_day_time(str2), + interval '1 2:3:4' day to second + ts, + interval_day_time(str2) + ts, + ts - interval '1 2:3:4' day to second, + ts - interval_day_time(str2) +from vector_interval_1 order by ts +PREHOOK: type: QUERY +POSTHOOK: query: -- timestamp-interval arithmetic +explain +select + ts, + ts + interval '1-2' year to month, + ts + interval_year_month(str1), + interval '1-2' year to month + ts, + interval_year_month(str1) + ts, + ts - interval '1-2' year to month, + ts - interval_year_month(str1), + ts + interval '1 2:3:4' day to second, + ts + interval_day_time(str2), + interval '1 2:3:4' day to second + ts, + interval_day_time(str2) + ts, + ts - interval '1 2:3:4' day to second, + ts - interval_day_time(str2) +from vector_interval_1 order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ts (type: timestamp), (ts + 1-2) (type: timestamp), (ts + CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: timestamp), (1-2 + ts) (type: timestamp), (CAST( str1 AS INTERVAL YEAR TO MONTH) + ts) (type: timestamp), (ts - 1-2) (type: timestamp), (ts - CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: timestamp), (ts + 1 02:03:04.000000000) (type: timestamp), (ts + CAST( str2 AS INTERVAL DAY TO SECOND)) (type: timestamp), (1 02:03:04.000000000 + ts) (type: timestamp), (CAST( str2 AS INTERVAL DAY TO SECOND) + ts) (type: timestamp), (ts - 1 02:03:04.000000000) (type: timestamp), (ts - CAST( str2 AS INTERVAL DAY TO SECOND)) (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: timestamp), _col2 (type: timestamp), _col3 (type: timestamp), _col4 (type: timestamp), _col5 (type: timestamp), _col6 (type: timestamp), _col7 (type: timestamp), _col8 (type: timestamp), _col9 (type: timestamp), _col10 (type: timestamp), _col11 (type: timestamp), _col12 (type: timestamp) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: timestamp), VALUE._col1 (type: timestamp), VALUE._col2 (type: timestamp), VALUE._col3 (type: timestamp), VALUE._col4 (type: timestamp), VALUE._col5 (type: timestamp), VALUE._col6 (type: timestamp), VALUE._col7 (type: timestamp), VALUE._col8 (type: timestamp), VALUE._col9 (type: timestamp), VALUE._col10 (type: timestamp), VALUE._col11 (type: timestamp) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + 
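In the date-interval results above, the year-month cases stay dates (2001-01-01 plus 1-2 gives 2002-03-01) while the day-time cases are promoted to timestamps anchored at the date's midnight (2001-01-01 minus 1 2:3:4 gives 2000-12-30 21:56:56). java.time reproduces both behaviors; a sketch, assuming the promotion-to-midnight reading of the plan's (dt + 1 02:03:04.000000000) (type: timestamp) expressions:

import java.time.Duration;
import java.time.LocalDate;

public class DateIntervalDemo {
    public static void main(String[] args) {
        LocalDate dt = LocalDate.parse("2001-01-01");

        // date +/- interval_year_month stays a date: only year/month move.
        System.out.println(dt.plusMonths(14));    // 2002-03-01
        System.out.println(dt.minusMonths(14));   // 1999-11-01

        // date +/- interval_day_time becomes a timestamp: midnight of the
        // date shifted by the interval's duration.
        Duration d = Duration.ofDays(1).plusHours(2).plusMinutes(3).plusSeconds(4);
        System.out.println(dt.atStartOfDay().plus(d));    // 2001-01-02T02:03:04
        System.out.println(dt.atStartOfDay().minus(d));   // 2000-12-30T21:56:56
    }
}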
compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + ts, + ts + interval '1-2' year to month, + ts + interval_year_month(str1), + interval '1-2' year to month + ts, + interval_year_month(str1) + ts, + ts - interval '1-2' year to month, + ts - interval_year_month(str1), + ts + interval '1 2:3:4' day to second, + ts + interval_day_time(str2), + interval '1 2:3:4' day to second + ts, + interval_day_time(str2) + ts, + ts - interval '1 2:3:4' day to second, + ts - interval_day_time(str2) +from vector_interval_1 order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + ts, + ts + interval '1-2' year to month, + ts + interval_year_month(str1), + interval '1-2' year to month + ts, + interval_year_month(str1) + ts, + ts - interval '1-2' year to month, + ts - interval_year_month(str1), + ts + interval '1 2:3:4' day to second, + ts + interval_day_time(str2), + interval '1 2:3:4' day to second + ts, + interval_day_time(str2) + ts, + ts - interval '1 2:3:4' day to second, + ts - interval_day_time(str2) +from vector_interval_1 order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +2001-01-01 01:02:03 2002-03-01 01:02:03 2002-03-01 01:02:03 2002-03-01 01:02:03 2002-03-01 01:02:03 1999-11-01 01:02:03 1999-11-01 01:02:03 2001-01-02 03:05:07 2001-01-02 03:05:07 2001-01-02 03:05:07 2001-01-02 03:05:07 2000-12-30 22:58:59 2000-12-30 22:58:59 +PREHOOK: query: -- timestamp-timestamp arithmetic +explain +select + ts, + ts - ts, + timestamp '2001-01-01 01:02:03' - ts, + ts - timestamp '2001-01-01 01:02:03' +from vector_interval_1 order by ts +PREHOOK: type: QUERY +POSTHOOK: query: -- timestamp-timestamp arithmetic +explain +select + ts, + ts - ts, + timestamp '2001-01-01 01:02:03' - ts, + ts - timestamp '2001-01-01 01:02:03' +from vector_interval_1 order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: ts (type: timestamp), (ts - ts) (type: interval_day_time), (2001-01-01 01:02:03.0 - ts) (type: interval_day_time), (ts - 2001-01-01 01:02:03.0) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num 
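One pattern holds across all of these result sets: the row inserted as all NULLs comes back NULL in every computed column, consistent with a vectorized kernel that ORs the input null masks before doing any arithmetic. A simplified model of such a kernel, with plain arrays standing in for Hive's column vectors (which also carry noNulls/isRepeating state this sketch ignores):

public class VectorSubtractDemo {
    // Element-wise a - b over long-encoded columns with null masks.
    static void subtract(long[] a, boolean[] aNull, long[] b, boolean[] bNull,
                         long[] out, boolean[] outNull, int n) {
        for (int i = 0; i < n; i++) {
            outNull[i] = aNull[i] || bNull[i];   // NULL in either input -> NULL out
            if (!outNull[i]) {
                out[i] = a[i] - b[i];
            }
        }
    }

    public static void main(String[] args) {
        // Row 0 is the NULL row; row 1 holds a timestamp subtracted from itself.
        long[] ts = {0L, 93784_000000000L};
        boolean[] tsNull = {true, false};
        long[] out = new long[2];
        boolean[] outNull = new boolean[2];
        subtract(ts, tsNull, ts, tsNull, out, outNull, 2);
        System.out.println(outNull[0]);   // true -> rendered as NULL
        System.out.println(out[1]);       // 0    -> 0 00:00:00.000000000
    }
}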
rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + ts, + ts - ts, + timestamp '2001-01-01 01:02:03' - ts, + ts - timestamp '2001-01-01 01:02:03' +from vector_interval_1 order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + ts, + ts - ts, + timestamp '2001-01-01 01:02:03' - ts, + ts - timestamp '2001-01-01 01:02:03' +from vector_interval_1 order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL NULL NULL NULL +2001-01-01 01:02:03 0 00:00:00.000000000 0 00:00:00.000000000 0 00:00:00.000000000 +PREHOOK: query: -- date-date arithmetic +explain +select + dt, + dt - dt, + date '2001-01-01' - dt, + dt - date '2001-01-01' +from vector_interval_1 order by dt +PREHOOK: type: QUERY +POSTHOOK: query: -- date-date arithmetic +explain +select + dt, + dt - dt, + date '2001-01-01' - dt, + dt - date '2001-01-01' +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt (type: date), (dt - dt) (type: interval_day_time), (2001-01-01 - dt) (type: interval_day_time), (dt - 2001-01-01) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt, + dt - dt, + date '2001-01-01' - dt, + dt - date '2001-01-01' +from vector_interval_1 order by dt +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt, + dt - dt, + date '2001-01-01' - dt, + dt - date '2001-01-01' +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here 
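The timestamp-timestamp and date-date subtractions above both produce interval_day_time, and the mixed date-timestamp block just below adds a sign: reversing the operands of a sub-day difference prints as -0 01:02:03.000000000. A java.time sketch of that rendering (the render helper is illustrative, not Hive's formatter):

import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;

public class TimestampDiffDemo {
    // Render a duration like the interval_day_time output, sign first, so a
    // negative difference under one day prints with a "-0" days part.
    static String render(Duration d) {
        boolean neg = d.isNegative();
        Duration abs = neg ? d.negated() : d;
        long secs = abs.getSeconds();
        return String.format("%s%d %02d:%02d:%02d.%09d", neg ? "-" : "",
                secs / 86400, (secs % 86400) / 3600, (secs % 3600) / 60,
                secs % 60, abs.getNano());
    }

    public static void main(String[] args) {
        LocalDate dt = LocalDate.parse("2001-01-01");
        LocalDateTime ts = LocalDateTime.parse("2001-01-01T01:02:03");
        // The date side is promoted to midnight before subtracting.
        System.out.println(render(Duration.between(dt.atStartOfDay(), ts)));
        // 0 01:02:03.000000000
        System.out.println(render(Duration.between(ts, dt.atStartOfDay())));
        // -0 01:02:03.000000000
        System.out.println(render(Duration.between(ts, ts)));
        // 0 00:00:00.000000000
    }
}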
#### +NULL NULL NULL NULL +2001-01-01 0 00:00:00.000000000 0 00:00:00.000000000 0 00:00:00.000000000 +PREHOOK: query: -- date-timestamp arithmetic +explain +select + dt, + ts - dt, + timestamp '2001-01-01 01:02:03' - dt, + ts - date '2001-01-01', + dt - ts, + dt - timestamp '2001-01-01 01:02:03', + date '2001-01-01' - ts +from vector_interval_1 order by dt +PREHOOK: type: QUERY +POSTHOOK: query: -- date-timestamp arithmetic +explain +select + dt, + ts - dt, + timestamp '2001-01-01 01:02:03' - dt, + ts - date '2001-01-01', + dt - ts, + dt - timestamp '2001-01-01 01:02:03', + date '2001-01-01' - ts +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_1 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: dt (type: date), (ts - dt) (type: interval_day_time), (2001-01-01 01:02:03.0 - dt) (type: interval_day_time), (ts - 2001-01-01) (type: interval_day_time), (dt - ts) (type: interval_day_time), (dt - 2001-01-01 01:02:03.0) (type: interval_day_time), (2001-01-01 - ts) (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: date) + sort order: + + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: interval_day_time), _col2 (type: interval_day_time), _col3 (type: interval_day_time), _col4 (type: interval_day_time), _col5 (type: interval_day_time), _col6 (type: interval_day_time) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: date), VALUE._col0 (type: interval_day_time), VALUE._col1 (type: interval_day_time), VALUE._col2 (type: interval_day_time), VALUE._col3 (type: interval_day_time), VALUE._col4 (type: interval_day_time), VALUE._col5 (type: interval_day_time) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 442 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + dt, + ts - dt, + timestamp '2001-01-01 01:02:03' - dt, + ts - date '2001-01-01', + dt - ts, + dt - timestamp '2001-01-01 01:02:03', + date '2001-01-01' - ts +from vector_interval_1 order by dt +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +POSTHOOK: query: select + dt, + ts - dt, + timestamp '2001-01-01 01:02:03' - dt, + ts - date '2001-01-01', + dt - ts, + dt - timestamp '2001-01-01 01:02:03', + date '2001-01-01' - ts +from vector_interval_1 order by dt +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_1 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL +2001-01-01 0 01:02:03.000000000 0 01:02:03.000000000 0 01:02:03.000000000 -0 01:02:03.000000000 -0 01:02:03.000000000 -0 01:02:03.000000000 diff --git 
a/ql/src/test/results/clientpositive/vector_interval_2.q.out b/ql/src/test/results/clientpositive/vector_interval_2.q.out new file mode 100644 index 0000000..b70ef7a --- /dev/null +++ b/ql/src/test/results/clientpositive/vector_interval_2.q.out @@ -0,0 +1,1550 @@ +PREHOOK: query: drop table if exists vector_interval_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table if exists vector_interval_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table vector_interval_2 (ts timestamp, dt date, str1 string, str2 string, str3 string, str4 string) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@vector_interval_2 +POSTHOOK: query: create table vector_interval_2 (ts timestamp, dt date, str1 string, str2 string, str3 string, str4 string) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@vector_interval_2 +PREHOOK: query: insert into vector_interval_2 + select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1-3', '1 2:3:4', '1 2:3:5' from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@vector_interval_2 +POSTHOOK: query: insert into vector_interval_2 + select timestamp '2001-01-01 01:02:03', date '2001-01-01', '1-2', '1-3', '1 2:3:4', '1 2:3:5' from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@vector_interval_2 +POSTHOOK: Lineage: vector_interval_2.dt SIMPLE [] +POSTHOOK: Lineage: vector_interval_2.str1 SIMPLE [] +POSTHOOK: Lineage: vector_interval_2.str2 SIMPLE [] +POSTHOOK: Lineage: vector_interval_2.str3 SIMPLE [] +POSTHOOK: Lineage: vector_interval_2.str4 SIMPLE [] +POSTHOOK: Lineage: vector_interval_2.ts SIMPLE [] +PREHOOK: query: insert into vector_interval_2 + select null, null, null, null, null, null from src limit 1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@vector_interval_2 +POSTHOOK: query: insert into vector_interval_2 + select null, null, null, null, null, null from src limit 1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@vector_interval_2 +POSTHOOK: Lineage: vector_interval_2.dt EXPRESSION [] +POSTHOOK: Lineage: vector_interval_2.str1 EXPRESSION [] +POSTHOOK: Lineage: vector_interval_2.str2 EXPRESSION [] +POSTHOOK: Lineage: vector_interval_2.str3 EXPRESSION [] +POSTHOOK: Lineage: vector_interval_2.str4 EXPRESSION [] +POSTHOOK: Lineage: vector_interval_2.ts EXPRESSION [] +PREHOOK: query: -- interval comparisons in select clause + +explain +select + str1, + -- Should all be true + interval_year_month(str1) = interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str2), + interval_year_month(str1) < interval_year_month(str2), + interval_year_month(str1) >= interval_year_month(str1), + interval_year_month(str2) >= interval_year_month(str1), + interval_year_month(str2) > interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str2), + + interval_year_month(str1) = interval '1-2' year to month, + interval_year_month(str1) <= interval '1-2' year to month, + interval_year_month(str1) <= interval '1-3' year to month, + interval_year_month(str1) < interval '1-3' year to month, + interval_year_month(str1) >= interval '1-2' year to month, + interval_year_month(str2) >= interval '1-2' year to month, + interval_year_month(str2) > interval '1-2' year to month, + interval_year_month(str1) != interval '1-3' year to 
month, + + interval '1-2' year to month = interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str2), + interval '1-2' year to month < interval_year_month(str2), + interval '1-2' year to month >= interval_year_month(str1), + interval '1-3' year to month >= interval_year_month(str1), + interval '1-3' year to month > interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str2) +from vector_interval_2 order by str1 +PREHOOK: type: QUERY +POSTHOOK: query: -- interval comparisons in select clause + +explain +select + str1, + -- Should all be true + interval_year_month(str1) = interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str2), + interval_year_month(str1) < interval_year_month(str2), + interval_year_month(str1) >= interval_year_month(str1), + interval_year_month(str2) >= interval_year_month(str1), + interval_year_month(str2) > interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str2), + + interval_year_month(str1) = interval '1-2' year to month, + interval_year_month(str1) <= interval '1-2' year to month, + interval_year_month(str1) <= interval '1-3' year to month, + interval_year_month(str1) < interval '1-3' year to month, + interval_year_month(str1) >= interval '1-2' year to month, + interval_year_month(str2) >= interval '1-2' year to month, + interval_year_month(str2) > interval '1-2' year to month, + interval_year_month(str1) != interval '1-3' year to month, + + interval '1-2' year to month = interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str2), + interval '1-2' year to month < interval_year_month(str2), + interval '1-2' year to month >= interval_year_month(str1), + interval '1-3' year to month >= interval_year_month(str1), + interval '1-3' year to month > interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str2) +from vector_interval_2 order by str1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: str1 (type: string), (CAST( str1 AS INTERVAL YEAR TO MONTH) = CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <= CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) < CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) > CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <> CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) = 1-2) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <= 1-2) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <= 1-3) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) < 1-3) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO 
MONTH) >= 1-2) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) >= 1-2) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) > 1-2) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <> 1-3) (type: boolean), (1-2 = CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 <= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 <= CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 < CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 >= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-3 >= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-3 > CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 <> CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col12 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean), _col18 (type: boolean), _col19 (type: boolean), _col20 (type: boolean), _col21 (type: boolean), _col22 (type: boolean), _col23 (type: boolean), _col24 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), VALUE._col11 (type: boolean), VALUE._col12 (type: boolean), VALUE._col13 (type: boolean), VALUE._col14 (type: boolean), VALUE._col15 (type: boolean), VALUE._col16 (type: boolean), VALUE._col17 (type: boolean), VALUE._col18 (type: boolean), VALUE._col19 (type: boolean), VALUE._col20 (type: boolean), VALUE._col21 (type: boolean), VALUE._col22 (type: boolean), VALUE._col23 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + str1, + -- Should all be true + interval_year_month(str1) = interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str2), + 
interval_year_month(str1) < interval_year_month(str2), + interval_year_month(str1) >= interval_year_month(str1), + interval_year_month(str2) >= interval_year_month(str1), + interval_year_month(str2) > interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str2), + + interval_year_month(str1) = interval '1-2' year to month, + interval_year_month(str1) <= interval '1-2' year to month, + interval_year_month(str1) <= interval '1-3' year to month, + interval_year_month(str1) < interval '1-3' year to month, + interval_year_month(str1) >= interval '1-2' year to month, + interval_year_month(str2) >= interval '1-2' year to month, + interval_year_month(str2) > interval '1-2' year to month, + interval_year_month(str1) != interval '1-3' year to month, + + interval '1-2' year to month = interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str2), + interval '1-2' year to month < interval_year_month(str2), + interval '1-2' year to month >= interval_year_month(str1), + interval '1-3' year to month >= interval_year_month(str1), + interval '1-3' year to month > interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str2) +from vector_interval_2 order by str1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select + str1, + -- Should all be true + interval_year_month(str1) = interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str1), + interval_year_month(str1) <= interval_year_month(str2), + interval_year_month(str1) < interval_year_month(str2), + interval_year_month(str1) >= interval_year_month(str1), + interval_year_month(str2) >= interval_year_month(str1), + interval_year_month(str2) > interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str2), + + interval_year_month(str1) = interval '1-2' year to month, + interval_year_month(str1) <= interval '1-2' year to month, + interval_year_month(str1) <= interval '1-3' year to month, + interval_year_month(str1) < interval '1-3' year to month, + interval_year_month(str1) >= interval '1-2' year to month, + interval_year_month(str2) >= interval '1-2' year to month, + interval_year_month(str2) > interval '1-2' year to month, + interval_year_month(str1) != interval '1-3' year to month, + + interval '1-2' year to month = interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str1), + interval '1-2' year to month <= interval_year_month(str2), + interval '1-2' year to month < interval_year_month(str2), + interval '1-2' year to month >= interval_year_month(str1), + interval '1-3' year to month >= interval_year_month(str1), + interval '1-3' year to month > interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str2) +from vector_interval_2 order by str1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +1-2 true true true true true true true true true true true true true true true true true true true true true true true true +PREHOOK: query: explain +select + str1, + -- Should all be false + interval_year_month(str1) != interval_year_month(str1), + interval_year_month(str1) >= interval_year_month(str2), + interval_year_month(str1) > interval_year_month(str2), + interval_year_month(str2) <= 
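The all-true row above (1-2 true true ...) covers every operand shape: string cast vs. string cast, cast vs. literal, and literal vs. cast. All of them reduce to comparing the underlying month counts, so 1-2 and 1-3 compare as 14 and 15. A standalone sketch of that reduction (names illustrative):

public class YearMonthCompareDemo {
    static int months(String s) {
        String[] p = s.split("-");
        return 12 * Integer.parseInt(p[0]) + Integer.parseInt(p[1]);
    }

    public static void main(String[] args) {
        int a = months("1-2");        // 14
        int b = months("1-3");        // 15
        System.out.println(a <= b);   // true
        System.out.println(a < b);    // true
        System.out.println(b >= a);   // true
        System.out.println(b > a);    // true
        System.out.println(a != b);   // true
    }
}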
interval_year_month(str1), + interval_year_month(str2) < interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str1), + + interval_year_month(str1) != interval '1-2' year to month, + interval_year_month(str1) >= interval '1-3' year to month, + interval_year_month(str1) > interval '1-3' year to month, + interval_year_month(str2) <= interval '1-2' year to month, + interval_year_month(str2) < interval '1-2' year to month, + interval_year_month(str1) != interval '1-2' year to month, + + interval '1-2' year to month != interval_year_month(str1), + interval '1-2' year to month >= interval_year_month(str2), + interval '1-2' year to month > interval_year_month(str2), + interval '1-3' year to month <= interval_year_month(str1), + interval '1-3' year to month < interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str1) +from vector_interval_2 order by str1 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + str1, + -- Should all be false + interval_year_month(str1) != interval_year_month(str1), + interval_year_month(str1) >= interval_year_month(str2), + interval_year_month(str1) > interval_year_month(str2), + interval_year_month(str2) <= interval_year_month(str1), + interval_year_month(str2) < interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str1), + + interval_year_month(str1) != interval '1-2' year to month, + interval_year_month(str1) >= interval '1-3' year to month, + interval_year_month(str1) > interval '1-3' year to month, + interval_year_month(str2) <= interval '1-2' year to month, + interval_year_month(str2) < interval '1-2' year to month, + interval_year_month(str1) != interval '1-2' year to month, + + interval '1-2' year to month != interval_year_month(str1), + interval '1-2' year to month >= interval_year_month(str2), + interval '1-2' year to month > interval_year_month(str2), + interval '1-3' year to month <= interval_year_month(str1), + interval '1-3' year to month < interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str1) +from vector_interval_2 order by str1 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: str1 (type: string), (CAST( str1 AS INTERVAL YEAR TO MONTH) <> CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) <= 1-2) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) < 1-2) (type: boolean), (1-2 <> CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 >= CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 > CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-3 <= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-3 < CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) >= CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) > CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) <= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) < CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <> 1-2) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) >= 1-3) (type: boolean), (CAST( str1 AS INTERVAL YEAR 
TO MONTH) > 1-3) (type: boolean) + outputColumnNames: _col0, _col1, _col10, _col11, _col13, _col14, _col15, _col16, _col17, _col2, _col3, _col4, _col5, _col7, _col8, _col9 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col0 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean), VALUE._col9 (type: boolean), VALUE._col5 (type: boolean), VALUE._col10 (type: boolean), VALUE._col11 (type: boolean), VALUE._col12 (type: boolean), VALUE._col13 (type: boolean), VALUE._col14 (type: boolean), VALUE._col10 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + str1, + -- Should all be false + interval_year_month(str1) != interval_year_month(str1), + interval_year_month(str1) >= interval_year_month(str2), + interval_year_month(str1) > interval_year_month(str2), + interval_year_month(str2) <= interval_year_month(str1), + interval_year_month(str2) < interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str1), + + interval_year_month(str1) != interval '1-2' year to month, + interval_year_month(str1) >= interval '1-3' year to month, + interval_year_month(str1) > interval '1-3' year to month, + interval_year_month(str2) <= interval '1-2' year to month, + interval_year_month(str2) < interval '1-2' year to month, + interval_year_month(str1) != interval '1-2' year to month, + + interval '1-2' year to month != interval_year_month(str1), + interval '1-2' year to month >= interval_year_month(str2), + interval '1-2' year to month > interval_year_month(str2), + interval '1-3' year to month <= interval_year_month(str1), + interval '1-3' year to month < interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str1) +from vector_interval_2 order by str1 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select + str1, + -- Should all be false + interval_year_month(str1) != interval_year_month(str1), + interval_year_month(str1) >= interval_year_month(str2), + 
interval_year_month(str1) > interval_year_month(str2), + interval_year_month(str2) <= interval_year_month(str1), + interval_year_month(str2) < interval_year_month(str1), + interval_year_month(str1) != interval_year_month(str1), + + interval_year_month(str1) != interval '1-2' year to month, + interval_year_month(str1) >= interval '1-3' year to month, + interval_year_month(str1) > interval '1-3' year to month, + interval_year_month(str2) <= interval '1-2' year to month, + interval_year_month(str2) < interval '1-2' year to month, + interval_year_month(str1) != interval '1-2' year to month, + + interval '1-2' year to month != interval_year_month(str1), + interval '1-2' year to month >= interval_year_month(str2), + interval '1-2' year to month > interval_year_month(str2), + interval '1-3' year to month <= interval_year_month(str1), + interval '1-3' year to month < interval_year_month(str1), + interval '1-2' year to month != interval_year_month(str1) +from vector_interval_2 order by str1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +1-2 false false false false false false false false false false false false false false false false false false +PREHOOK: query: explain +select + str3, + -- Should all be true + interval_day_time(str3) = interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str4), + interval_day_time(str3) < interval_day_time(str4), + interval_day_time(str3) >= interval_day_time(str3), + interval_day_time(str4) >= interval_day_time(str3), + interval_day_time(str4) > interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str4), + + interval_day_time(str3) = interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:5' day to second, + interval_day_time(str3) < interval '1 2:3:5' day to second, + interval_day_time(str3) >= interval '1 2:3:4' day to second, + interval_day_time(str4) >= interval '1 2:3:4' day to second, + interval_day_time(str4) > interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:5' day to second, + + interval '1 2:3:4' day to second = interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str4), + interval '1 2:3:4' day to second < interval_day_time(str4), + interval '1 2:3:4' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second > interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str4) +from vector_interval_2 order by str3 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + str3, + -- Should all be true + interval_day_time(str3) = interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str4), + interval_day_time(str3) < interval_day_time(str4), + interval_day_time(str3) >= interval_day_time(str3), + interval_day_time(str4) >= interval_day_time(str3), + interval_day_time(str4) > interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str4), + + interval_day_time(str3) = interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:5' day to second, 
+ interval_day_time(str3) < interval '1 2:3:5' day to second, + interval_day_time(str3) >= interval '1 2:3:4' day to second, + interval_day_time(str4) >= interval '1 2:3:4' day to second, + interval_day_time(str4) > interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:5' day to second, + + interval '1 2:3:4' day to second = interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str4), + interval '1 2:3:4' day to second < interval_day_time(str4), + interval '1 2:3:4' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second > interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str4) +from vector_interval_2 order by str3 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: str3 (type: string), (CAST( str3 AS INTERVAL DAY TO SECOND) = CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <= CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) < CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) >= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) >= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) > CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <> CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) = 1 02:03:04.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <= 1 02:03:04.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <= 1 02:03:05.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) < 1 02:03:05.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) >= 1 02:03:04.000000000) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) >= 1 02:03:04.000000000) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) > 1 02:03:04.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <> 1 02:03:05.000000000) (type: boolean), (1 02:03:04.000000000 = CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 <= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 <= CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 < CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 >= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:05.000000000 >= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:05.000000000 > CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 <> CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column 
stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col12 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean), _col18 (type: boolean), _col19 (type: boolean), _col20 (type: boolean), _col21 (type: boolean), _col22 (type: boolean), _col23 (type: boolean), _col24 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean), VALUE._col9 (type: boolean), VALUE._col10 (type: boolean), VALUE._col11 (type: boolean), VALUE._col12 (type: boolean), VALUE._col13 (type: boolean), VALUE._col14 (type: boolean), VALUE._col15 (type: boolean), VALUE._col16 (type: boolean), VALUE._col17 (type: boolean), VALUE._col18 (type: boolean), VALUE._col19 (type: boolean), VALUE._col20 (type: boolean), VALUE._col21 (type: boolean), VALUE._col22 (type: boolean), VALUE._col23 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + str3, + -- Should all be true + interval_day_time(str3) = interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str4), + interval_day_time(str3) < interval_day_time(str4), + interval_day_time(str3) >= interval_day_time(str3), + interval_day_time(str4) >= interval_day_time(str3), + interval_day_time(str4) > interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str4), + + interval_day_time(str3) = interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:5' day to second, + interval_day_time(str3) < interval '1 2:3:5' day to second, + interval_day_time(str3) >= interval '1 2:3:4' day to second, + interval_day_time(str4) >= interval '1 2:3:4' day to second, + interval_day_time(str4) > interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:5' day to second, + + interval '1 2:3:4' day to second = interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str4), + interval '1 2:3:4' day to second < interval_day_time(str4), + interval '1 2:3:4' day 
to second >= interval_day_time(str3), + interval '1 2:3:5' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second > interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str4) +from vector_interval_2 order by str3 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select + str3, + -- Should all be true + interval_day_time(str3) = interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str3), + interval_day_time(str3) <= interval_day_time(str4), + interval_day_time(str3) < interval_day_time(str4), + interval_day_time(str3) >= interval_day_time(str3), + interval_day_time(str4) >= interval_day_time(str3), + interval_day_time(str4) > interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str4), + + interval_day_time(str3) = interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:4' day to second, + interval_day_time(str3) <= interval '1 2:3:5' day to second, + interval_day_time(str3) < interval '1 2:3:5' day to second, + interval_day_time(str3) >= interval '1 2:3:4' day to second, + interval_day_time(str4) >= interval '1 2:3:4' day to second, + interval_day_time(str4) > interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:5' day to second, + + interval '1 2:3:4' day to second = interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str3), + interval '1 2:3:4' day to second <= interval_day_time(str4), + interval '1 2:3:4' day to second < interval_day_time(str4), + interval '1 2:3:4' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second >= interval_day_time(str3), + interval '1 2:3:5' day to second > interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str4) +from vector_interval_2 order by str3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +1 2:3:4 true true true true true true true true true true true true true true true true true true true true true true true true +PREHOOK: query: explain +select + str3, + -- Should all be false + interval_day_time(str3) != interval_day_time(str3), + interval_day_time(str3) >= interval_day_time(str4), + interval_day_time(str3) > interval_day_time(str4), + interval_day_time(str4) <= interval_day_time(str3), + interval_day_time(str4) < interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str3), + + interval_day_time(str3) != interval '1 2:3:4' day to second, + interval_day_time(str3) >= interval '1 2:3:5' day to second, + interval_day_time(str3) > interval '1 2:3:5' day to second, + interval_day_time(str4) <= interval '1 2:3:4' day to second, + interval_day_time(str4) < interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:4' day to second, + + interval '1 2:3:4' day to second != interval_day_time(str3), + interval '1 2:3:4' day to second >= interval_day_time(str4), + interval '1 2:3:4' day to second > interval_day_time(str4), + interval '1 2:3:5' day to second <= interval_day_time(str3), + interval '1 2:3:5' day to second < interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str3) +from vector_interval_2 order by str3 +PREHOOK: type: QUERY +POSTHOOK: query: explain +select + str3, + -- Should all be false + interval_day_time(str3) != 
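As with the year-month cases, the day-time comparison rows (all true in the block above, all false in the block that follows) reduce to ordering two nanosecond totals; 1 2:3:4 and 1 2:3:5 differ by exactly one second. A sketch under the same single-long encoding assumption as before:

public class DayTimeCompareDemo {
    static long nanos(String s) {
        String[] dt = s.split(" ");
        String[] hms = dt[1].split(":");
        return (Long.parseLong(dt[0]) * 86400
              + Long.parseLong(hms[0]) * 3600
              + Long.parseLong(hms[1]) * 60
              + Long.parseLong(hms[2])) * 1_000_000_000L;
    }

    public static void main(String[] args) {
        long a = nanos("1 2:3:4");
        long b = nanos("1 2:3:5");    // one second longer
        System.out.println(a < b);    // true
        System.out.println(a >= b);   // false
        System.out.println(a != b);   // true
    }
}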
interval_day_time(str3), + interval_day_time(str3) >= interval_day_time(str4), + interval_day_time(str3) > interval_day_time(str4), + interval_day_time(str4) <= interval_day_time(str3), + interval_day_time(str4) < interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str3), + + interval_day_time(str3) != interval '1 2:3:4' day to second, + interval_day_time(str3) >= interval '1 2:3:5' day to second, + interval_day_time(str3) > interval '1 2:3:5' day to second, + interval_day_time(str4) <= interval '1 2:3:4' day to second, + interval_day_time(str4) < interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:4' day to second, + + interval '1 2:3:4' day to second != interval_day_time(str3), + interval '1 2:3:4' day to second >= interval_day_time(str4), + interval '1 2:3:4' day to second > interval_day_time(str4), + interval '1 2:3:5' day to second <= interval_day_time(str3), + interval '1 2:3:5' day to second < interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str3) +from vector_interval_2 order by str3 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: str3 (type: string), (CAST( str3 AS INTERVAL DAY TO SECOND) <> CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) <= 1 02:03:04.000000000) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) < 1 02:03:04.000000000) (type: boolean), (1 02:03:04.000000000 <> CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 >= CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:04.000000000 > CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:05.000000000 <= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (1 02:03:05.000000000 < CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) >= CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) > CAST( str4 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) <= CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str4 AS INTERVAL DAY TO SECOND) < CAST( str3 AS INTERVAL DAY TO SECOND)) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) <> 1 02:03:04.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) >= 1 02:03:05.000000000) (type: boolean), (CAST( str3 AS INTERVAL DAY TO SECOND) > 1 02:03:05.000000000) (type: boolean) + outputColumnNames: _col0, _col1, _col10, _col11, _col13, _col14, _col15, _col16, _col17, _col2, _col3, _col4, _col5, _col7, _col8, _col9 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: boolean), _col2 (type: boolean), _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean), _col7 (type: boolean), _col8 (type: boolean), _col9 (type: boolean), _col10 (type: boolean), _col11 (type: boolean), _col13 (type: boolean), _col14 (type: boolean), _col15 (type: boolean), _col16 (type: boolean), _col17 (type: boolean) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator 
+ expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: boolean), VALUE._col1 (type: boolean), VALUE._col2 (type: boolean), VALUE._col3 (type: boolean), VALUE._col4 (type: boolean), VALUE._col0 (type: boolean), VALUE._col5 (type: boolean), VALUE._col6 (type: boolean), VALUE._col7 (type: boolean), VALUE._col8 (type: boolean), VALUE._col9 (type: boolean), VALUE._col5 (type: boolean), VALUE._col10 (type: boolean), VALUE._col11 (type: boolean), VALUE._col12 (type: boolean), VALUE._col13 (type: boolean), VALUE._col14 (type: boolean), VALUE._col10 (type: boolean) + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select + str3, + -- Should all be false + interval_day_time(str3) != interval_day_time(str3), + interval_day_time(str3) >= interval_day_time(str4), + interval_day_time(str3) > interval_day_time(str4), + interval_day_time(str4) <= interval_day_time(str3), + interval_day_time(str4) < interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str3), + + interval_day_time(str3) != interval '1 2:3:4' day to second, + interval_day_time(str3) >= interval '1 2:3:5' day to second, + interval_day_time(str3) > interval '1 2:3:5' day to second, + interval_day_time(str4) <= interval '1 2:3:4' day to second, + interval_day_time(str4) < interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:4' day to second, + + interval '1 2:3:4' day to second != interval_day_time(str3), + interval '1 2:3:4' day to second >= interval_day_time(str4), + interval '1 2:3:4' day to second > interval_day_time(str4), + interval '1 2:3:5' day to second <= interval_day_time(str3), + interval '1 2:3:5' day to second < interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str3) +from vector_interval_2 order by str3 +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select + str3, + -- Should all be false + interval_day_time(str3) != interval_day_time(str3), + interval_day_time(str3) >= interval_day_time(str4), + interval_day_time(str3) > interval_day_time(str4), + interval_day_time(str4) <= interval_day_time(str3), + interval_day_time(str4) < interval_day_time(str3), + interval_day_time(str3) != interval_day_time(str3), + + interval_day_time(str3) != interval '1 2:3:4' day to second, + interval_day_time(str3) >= interval '1 2:3:5' day to second, + interval_day_time(str3) > interval '1 2:3:5' day to second, + interval_day_time(str4) <= interval '1 2:3:4' day to second, + interval_day_time(str4) < interval '1 2:3:4' day to second, + interval_day_time(str3) != interval '1 2:3:4' day to second, + + interval '1 2:3:4' day to second != interval_day_time(str3), + interval '1 2:3:4' day to second >= interval_day_time(str4), + interval '1 2:3:4' day to second > interval_day_time(str4), + interval '1 2:3:5' day to second <= interval_day_time(str3), + interval '1 2:3:5' day to second < 
interval_day_time(str3), + interval '1 2:3:4' day to second != interval_day_time(str3) +from vector_interval_2 order by str3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL +1 2:3:4 false false false false false false false false false false false false false false false false false false +PREHOOK: query: -- interval expressions in predicates +explain +select ts from vector_interval_2 +where + interval_year_month(str1) = interval_year_month(str1) + and interval_year_month(str1) != interval_year_month(str2) + and interval_year_month(str1) <= interval_year_month(str2) + and interval_year_month(str1) < interval_year_month(str2) + and interval_year_month(str2) >= interval_year_month(str1) + and interval_year_month(str2) > interval_year_month(str1) + + and interval_year_month(str1) = interval '1-2' year to month + and interval_year_month(str1) != interval '1-3' year to month + and interval_year_month(str1) <= interval '1-3' year to month + and interval_year_month(str1) < interval '1-3' year to month + and interval_year_month(str2) >= interval '1-2' year to month + and interval_year_month(str2) > interval '1-2' year to month + + and interval '1-2' year to month = interval_year_month(str1) + and interval '1-2' year to month != interval_year_month(str2) + and interval '1-2' year to month <= interval_year_month(str2) + and interval '1-2' year to month < interval_year_month(str2) + and interval '1-3' year to month >= interval_year_month(str1) + and interval '1-3' year to month > interval_year_month(str1) +order by ts +PREHOOK: type: QUERY +POSTHOOK: query: -- interval expressions in predicates +explain +select ts from vector_interval_2 +where + interval_year_month(str1) = interval_year_month(str1) + and interval_year_month(str1) != interval_year_month(str2) + and interval_year_month(str1) <= interval_year_month(str2) + and interval_year_month(str1) < interval_year_month(str2) + and interval_year_month(str2) >= interval_year_month(str1) + and interval_year_month(str2) > interval_year_month(str1) + + and interval_year_month(str1) = interval '1-2' year to month + and interval_year_month(str1) != interval '1-3' year to month + and interval_year_month(str1) <= interval '1-3' year to month + and interval_year_month(str1) < interval '1-3' year to month + and interval_year_month(str2) >= interval '1-2' year to month + and interval_year_month(str2) > interval '1-2' year to month + + and interval '1-2' year to month = interval_year_month(str1) + and interval '1-2' year to month != interval_year_month(str2) + and interval '1-2' year to month <= interval_year_month(str2) + and interval '1-2' year to month < interval_year_month(str2) + and interval '1-3' year to month >= interval_year_month(str1) + and interval '1-3' year to month > interval_year_month(str1) +order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((((((((((((CAST( str1 AS INTERVAL YEAR TO MONTH) = CAST( str1 AS INTERVAL YEAR TO MONTH)) and (CAST( str1 AS INTERVAL YEAR TO MONTH) <> CAST( str2 AS INTERVAL YEAR TO MONTH))) and (CAST( str1 AS INTERVAL YEAR TO MONTH) <= CAST( str2 AS INTERVAL YEAR TO MONTH))) and 
(CAST( str1 AS INTERVAL YEAR TO MONTH) < CAST( str2 AS INTERVAL YEAR TO MONTH))) and (CAST( str2 AS INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR TO MONTH))) and (CAST( str2 AS INTERVAL YEAR TO MONTH) > CAST( str1 AS INTERVAL YEAR TO MONTH))) and (CAST( str1 AS INTERVAL YEAR TO MONTH) = 1-2)) and (CAST( str1 AS INTERVAL YEAR TO MONTH) <> 1-3)) and (CAST( str1 AS INTERVAL YEAR TO MONTH) <= 1-3)) and (CAST( str1 AS INTERVAL YEAR TO MONTH) < 1-3)) and (CAST( str2 AS INTERVAL YEAR TO MONTH) >= 1-2)) and (CAST( str2 AS INTERVAL YEAR TO MONTH) > 1-2)) and (1-2 = CAST( str1 AS INTERVAL YEAR TO MONTH))) and (1-2 <> CAST( str2 AS INTERVAL YEAR TO MONTH))) and (1-2 <= CAST( str2 AS INTERVAL YEAR TO MONTH))) and (1-2 < CAST( str2 AS INTERVAL YEAR TO MONTH))) and (1-3 >= CAST( str1 AS INTERVAL YEAR TO MONTH))) and (1-3 > CAST( str1 AS INTERVAL YEAR TO MONTH))) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts from vector_interval_2 +where + interval_year_month(str1) = interval_year_month(str1) + and interval_year_month(str1) != interval_year_month(str2) + and interval_year_month(str1) <= interval_year_month(str2) + and interval_year_month(str1) < interval_year_month(str2) + and interval_year_month(str2) >= interval_year_month(str1) + and interval_year_month(str2) > interval_year_month(str1) + + and interval_year_month(str1) = interval '1-2' year to month + and interval_year_month(str1) != interval '1-3' year to month + and interval_year_month(str1) <= interval '1-3' year to month + and interval_year_month(str1) < interval '1-3' year to month + and interval_year_month(str2) >= interval '1-2' year to month + and interval_year_month(str2) > interval '1-2' year to month + + and interval '1-2' year to month = interval_year_month(str1) + and interval '1-2' year to month != interval_year_month(str2) + and interval '1-2' year to month <= interval_year_month(str2) + and interval '1-2' year to month < interval_year_month(str2) + and interval '1-3' year to month >= interval_year_month(str1) + and interval '1-3' year to month > interval_year_month(str1) +order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select ts from vector_interval_2 +where + interval_year_month(str1) = interval_year_month(str1) + and interval_year_month(str1) != interval_year_month(str2) + and interval_year_month(str1) <= interval_year_month(str2) + and interval_year_month(str1) < interval_year_month(str2) + and interval_year_month(str2) >= 
interval_year_month(str1) + and interval_year_month(str2) > interval_year_month(str1) + + and interval_year_month(str1) = interval '1-2' year to month + and interval_year_month(str1) != interval '1-3' year to month + and interval_year_month(str1) <= interval '1-3' year to month + and interval_year_month(str1) < interval '1-3' year to month + and interval_year_month(str2) >= interval '1-2' year to month + and interval_year_month(str2) > interval '1-2' year to month + + and interval '1-2' year to month = interval_year_month(str1) + and interval '1-2' year to month != interval_year_month(str2) + and interval '1-2' year to month <= interval_year_month(str2) + and interval '1-2' year to month < interval_year_month(str2) + and interval '1-3' year to month >= interval_year_month(str1) + and interval '1-3' year to month > interval_year_month(str1) +order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +2001-01-01 01:02:03 +PREHOOK: query: explain +select ts from vector_interval_2 +where + interval_day_time(str3) = interval_day_time(str3) + and interval_day_time(str3) != interval_day_time(str4) + and interval_day_time(str3) <= interval_day_time(str4) + and interval_day_time(str3) < interval_day_time(str4) + and interval_day_time(str4) >= interval_day_time(str3) + and interval_day_time(str4) > interval_day_time(str3) + + and interval_day_time(str3) = interval '1 2:3:4' day to second + and interval_day_time(str3) != interval '1 2:3:5' day to second + and interval_day_time(str3) <= interval '1 2:3:5' day to second + and interval_day_time(str3) < interval '1 2:3:5' day to second + and interval_day_time(str4) >= interval '1 2:3:4' day to second + and interval_day_time(str4) > interval '1 2:3:4' day to second + + and interval '1 2:3:4' day to second = interval_day_time(str3) + and interval '1 2:3:4' day to second != interval_day_time(str4) + and interval '1 2:3:4' day to second <= interval_day_time(str4) + and interval '1 2:3:4' day to second < interval_day_time(str4) + and interval '1 2:3:5' day to second >= interval_day_time(str3) + and interval '1 2:3:5' day to second > interval_day_time(str3) +order by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain +select ts from vector_interval_2 +where + interval_day_time(str3) = interval_day_time(str3) + and interval_day_time(str3) != interval_day_time(str4) + and interval_day_time(str3) <= interval_day_time(str4) + and interval_day_time(str3) < interval_day_time(str4) + and interval_day_time(str4) >= interval_day_time(str3) + and interval_day_time(str4) > interval_day_time(str3) + + and interval_day_time(str3) = interval '1 2:3:4' day to second + and interval_day_time(str3) != interval '1 2:3:5' day to second + and interval_day_time(str3) <= interval '1 2:3:5' day to second + and interval_day_time(str3) < interval '1 2:3:5' day to second + and interval_day_time(str4) >= interval '1 2:3:4' day to second + and interval_day_time(str4) > interval '1 2:3:4' day to second + + and interval '1 2:3:4' day to second = interval_day_time(str3) + and interval '1 2:3:4' day to second != interval_day_time(str4) + and interval '1 2:3:4' day to second <= interval_day_time(str4) + and interval '1 2:3:4' day to second < interval_day_time(str4) + and interval '1 2:3:5' day to second >= interval_day_time(str3) + and interval '1 2:3:5' day to second > interval_day_time(str3) +order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: 
Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((((((((((((CAST( str3 AS INTERVAL DAY TO SECOND) = CAST( str3 AS INTERVAL DAY TO SECOND)) and (CAST( str3 AS INTERVAL DAY TO SECOND) <> CAST( str4 AS INTERVAL DAY TO SECOND))) and (CAST( str3 AS INTERVAL DAY TO SECOND) <= CAST( str4 AS INTERVAL DAY TO SECOND))) and (CAST( str3 AS INTERVAL DAY TO SECOND) < CAST( str4 AS INTERVAL DAY TO SECOND))) and (CAST( str4 AS INTERVAL DAY TO SECOND) >= CAST( str3 AS INTERVAL DAY TO SECOND))) and (CAST( str4 AS INTERVAL DAY TO SECOND) > CAST( str3 AS INTERVAL DAY TO SECOND))) and (CAST( str3 AS INTERVAL DAY TO SECOND) = 1 02:03:04.000000000)) and (CAST( str3 AS INTERVAL DAY TO SECOND) <> 1 02:03:05.000000000)) and (CAST( str3 AS INTERVAL DAY TO SECOND) <= 1 02:03:05.000000000)) and (CAST( str3 AS INTERVAL DAY TO SECOND) < 1 02:03:05.000000000)) and (CAST( str4 AS INTERVAL DAY TO SECOND) >= 1 02:03:04.000000000)) and (CAST( str4 AS INTERVAL DAY TO SECOND) > 1 02:03:04.000000000)) and (1 02:03:04.000000000 = CAST( str3 AS INTERVAL DAY TO SECOND))) and (1 02:03:04.000000000 <> CAST( str4 AS INTERVAL DAY TO SECOND))) and (1 02:03:04.000000000 <= CAST( str4 AS INTERVAL DAY TO SECOND))) and (1 02:03:04.000000000 < CAST( str4 AS INTERVAL DAY TO SECOND))) and (1 02:03:05.000000000 >= CAST( str3 AS INTERVAL DAY TO SECOND))) and (1 02:03:05.000000000 > CAST( str3 AS INTERVAL DAY TO SECOND))) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts from vector_interval_2 +where + interval_day_time(str3) = interval_day_time(str3) + and interval_day_time(str3) != interval_day_time(str4) + and interval_day_time(str3) <= interval_day_time(str4) + and interval_day_time(str3) < interval_day_time(str4) + and interval_day_time(str4) >= interval_day_time(str3) + and interval_day_time(str4) > interval_day_time(str3) + + and interval_day_time(str3) = interval '1 2:3:4' day to second + and interval_day_time(str3) != interval '1 2:3:5' day to second + and interval_day_time(str3) <= interval '1 2:3:5' day to second + and interval_day_time(str3) < interval '1 2:3:5' day to second + and interval_day_time(str4) >= interval '1 2:3:4' day to second + and interval_day_time(str4) > interval '1 2:3:4' day to second + + and interval '1 2:3:4' day to second = interval_day_time(str3) + and interval '1 2:3:4' day to second != interval_day_time(str4) + and interval '1 2:3:4' day to second <= 
interval_day_time(str4) + and interval '1 2:3:4' day to second < interval_day_time(str4) + and interval '1 2:3:5' day to second >= interval_day_time(str3) + and interval '1 2:3:5' day to second > interval_day_time(str3) +order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select ts from vector_interval_2 +where + interval_day_time(str3) = interval_day_time(str3) + and interval_day_time(str3) != interval_day_time(str4) + and interval_day_time(str3) <= interval_day_time(str4) + and interval_day_time(str3) < interval_day_time(str4) + and interval_day_time(str4) >= interval_day_time(str3) + and interval_day_time(str4) > interval_day_time(str3) + + and interval_day_time(str3) = interval '1 2:3:4' day to second + and interval_day_time(str3) != interval '1 2:3:5' day to second + and interval_day_time(str3) <= interval '1 2:3:5' day to second + and interval_day_time(str3) < interval '1 2:3:5' day to second + and interval_day_time(str4) >= interval '1 2:3:4' day to second + and interval_day_time(str4) > interval '1 2:3:4' day to second + + and interval '1 2:3:4' day to second = interval_day_time(str3) + and interval '1 2:3:4' day to second != interval_day_time(str4) + and interval '1 2:3:4' day to second <= interval_day_time(str4) + and interval '1 2:3:4' day to second < interval_day_time(str4) + and interval '1 2:3:5' day to second >= interval_day_time(str3) + and interval '1 2:3:5' day to second > interval_day_time(str3) +order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +2001-01-01 01:02:03 +PREHOOK: query: explain +select ts from vector_interval_2 +where + date '2002-03-01' = dt + interval_year_month(str1) + and date '2002-03-01' <= dt + interval_year_month(str1) + and date '2002-03-01' >= dt + interval_year_month(str1) + and dt + interval_year_month(str1) = date '2002-03-01' + and dt + interval_year_month(str1) <= date '2002-03-01' + and dt + interval_year_month(str1) >= date '2002-03-01' + and dt != dt + interval_year_month(str1) + + and date '2002-03-01' = dt + interval '1-2' year to month + and date '2002-03-01' <= dt + interval '1-2' year to month + and date '2002-03-01' >= dt + interval '1-2' year to month + and dt + interval '1-2' year to month = date '2002-03-01' + and dt + interval '1-2' year to month <= date '2002-03-01' + and dt + interval '1-2' year to month >= date '2002-03-01' + and dt != dt + interval '1-2' year to month +order by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain +select ts from vector_interval_2 +where + date '2002-03-01' = dt + interval_year_month(str1) + and date '2002-03-01' <= dt + interval_year_month(str1) + and date '2002-03-01' >= dt + interval_year_month(str1) + and dt + interval_year_month(str1) = date '2002-03-01' + and dt + interval_year_month(str1) <= date '2002-03-01' + and dt + interval_year_month(str1) >= date '2002-03-01' + and dt != dt + interval_year_month(str1) + + and date '2002-03-01' = dt + interval '1-2' year to month + and date '2002-03-01' <= dt + interval '1-2' year to month + and date '2002-03-01' >= dt + interval '1-2' year to month + and dt + interval '1-2' year to month = date '2002-03-01' + and dt + interval '1-2' year to month <= date '2002-03-01' + and dt + interval '1-2' year to month >= date '2002-03-01' + and dt != dt + interval '1-2' year to month +order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: 
+ Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((((((((2002-03-01 = (dt + CAST( str1 AS INTERVAL YEAR TO MONTH))) and (2002-03-01 <= (dt + CAST( str1 AS INTERVAL YEAR TO MONTH)))) and (2002-03-01 >= (dt + CAST( str1 AS INTERVAL YEAR TO MONTH)))) and ((dt + CAST( str1 AS INTERVAL YEAR TO MONTH)) = 2002-03-01)) and ((dt + CAST( str1 AS INTERVAL YEAR TO MONTH)) <= 2002-03-01)) and ((dt + CAST( str1 AS INTERVAL YEAR TO MONTH)) >= 2002-03-01)) and (dt <> (dt + CAST( str1 AS INTERVAL YEAR TO MONTH)))) and (2002-03-01 = (dt + 1-2))) and (2002-03-01 <= (dt + 1-2))) and (2002-03-01 >= (dt + 1-2))) and ((dt + 1-2) = 2002-03-01)) and ((dt + 1-2) <= 2002-03-01)) and ((dt + 1-2) >= 2002-03-01)) and (dt <> (dt + 1-2))) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts from vector_interval_2 +where + date '2002-03-01' = dt + interval_year_month(str1) + and date '2002-03-01' <= dt + interval_year_month(str1) + and date '2002-03-01' >= dt + interval_year_month(str1) + and dt + interval_year_month(str1) = date '2002-03-01' + and dt + interval_year_month(str1) <= date '2002-03-01' + and dt + interval_year_month(str1) >= date '2002-03-01' + and dt != dt + interval_year_month(str1) + + and date '2002-03-01' = dt + interval '1-2' year to month + and date '2002-03-01' <= dt + interval '1-2' year to month + and date '2002-03-01' >= dt + interval '1-2' year to month + and dt + interval '1-2' year to month = date '2002-03-01' + and dt + interval '1-2' year to month <= date '2002-03-01' + and dt + interval '1-2' year to month >= date '2002-03-01' + and dt != dt + interval '1-2' year to month +order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select ts from vector_interval_2 +where + date '2002-03-01' = dt + interval_year_month(str1) + and date '2002-03-01' <= dt + interval_year_month(str1) + and date '2002-03-01' >= dt + interval_year_month(str1) + and dt + interval_year_month(str1) = date '2002-03-01' + and dt + interval_year_month(str1) <= date '2002-03-01' + and dt + interval_year_month(str1) >= date '2002-03-01' + and dt != dt + interval_year_month(str1) + + and date '2002-03-01' = dt + interval '1-2' year to month + and date '2002-03-01' <= dt + interval '1-2' year to month + and date '2002-03-01' >= dt + interval '1-2' year to month + and dt + interval '1-2' year to month = date 
'2002-03-01' + and dt + interval '1-2' year to month <= date '2002-03-01' + and dt + interval '1-2' year to month >= date '2002-03-01' + and dt != dt + interval '1-2' year to month +order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +2001-01-01 01:02:03 +PREHOOK: query: explain +select ts from vector_interval_2 +where + timestamp '2002-03-01 01:02:03' = ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' <= ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' >= ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' != ts + interval '1-2' year to month + and timestamp '2002-02-01 01:02:03' < ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' > ts + interval '1-2' year to month + + and ts + interval '1-2' year to month = timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month >= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month <= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month != timestamp '2002-04-01 01:02:03' + and ts + interval '1-2' year to month > timestamp '2002-02-01 01:02:03' + and ts + interval '1-2' year to month < timestamp '2002-04-01 01:02:03' + + and ts = ts + interval '0' year + and ts != ts + interval '1' year + and ts <= ts + interval '1' year + and ts < ts + interval '1' year + and ts >= ts - interval '1' year + and ts > ts - interval '1' year +order by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain +select ts from vector_interval_2 +where + timestamp '2002-03-01 01:02:03' = ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' <= ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' >= ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' != ts + interval '1-2' year to month + and timestamp '2002-02-01 01:02:03' < ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' > ts + interval '1-2' year to month + + and ts + interval '1-2' year to month = timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month >= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month <= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month != timestamp '2002-04-01 01:02:03' + and ts + interval '1-2' year to month > timestamp '2002-02-01 01:02:03' + and ts + interval '1-2' year to month < timestamp '2002-04-01 01:02:03' + + and ts = ts + interval '0' year + and ts != ts + interval '1' year + and ts <= ts + interval '1' year + and ts < ts + interval '1' year + and ts >= ts - interval '1' year + and ts > ts - interval '1' year +order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((((((((((((2002-03-01 01:02:03.0 = (ts + 1-2)) and (2002-03-01 01:02:03.0 <= (ts + 1-2))) and (2002-03-01 01:02:03.0 >= (ts + 1-2))) and (2002-04-01 01:02:03.0 <> (ts + 1-2))) and (2002-02-01 01:02:03.0 < (ts + 1-2))) and (2002-04-01 01:02:03.0 > (ts + 1-2))) and ((ts + 1-2) = 2002-03-01 01:02:03.0)) and ((ts + 1-2) >= 2002-03-01 01:02:03.0)) and ((ts + 1-2) <= 2002-03-01 01:02:03.0)) and ((ts + 1-2) <> 2002-04-01 01:02:03.0)) and ((ts + 1-2) > 2002-02-01 01:02:03.0)) and ((ts + 1-2) < 2002-04-01 01:02:03.0)) and 
(ts = (ts + 0-0))) and (ts <> (ts + 1-0))) and (ts <= (ts + 1-0))) and (ts < (ts + 1-0))) and (ts >= (ts - 1-0))) and (ts > (ts - 1-0))) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts from vector_interval_2 +where + timestamp '2002-03-01 01:02:03' = ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' <= ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' >= ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' != ts + interval '1-2' year to month + and timestamp '2002-02-01 01:02:03' < ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' > ts + interval '1-2' year to month + + and ts + interval '1-2' year to month = timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month >= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month <= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month != timestamp '2002-04-01 01:02:03' + and ts + interval '1-2' year to month > timestamp '2002-02-01 01:02:03' + and ts + interval '1-2' year to month < timestamp '2002-04-01 01:02:03' + + and ts = ts + interval '0' year + and ts != ts + interval '1' year + and ts <= ts + interval '1' year + and ts < ts + interval '1' year + and ts >= ts - interval '1' year + and ts > ts - interval '1' year +order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select ts from vector_interval_2 +where + timestamp '2002-03-01 01:02:03' = ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' <= ts + interval '1-2' year to month + and timestamp '2002-03-01 01:02:03' >= ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' != ts + interval '1-2' year to month + and timestamp '2002-02-01 01:02:03' < ts + interval '1-2' year to month + and timestamp '2002-04-01 01:02:03' > ts + interval '1-2' year to month + + and ts + interval '1-2' year to month = timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month >= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month <= timestamp '2002-03-01 01:02:03' + and ts + interval '1-2' year to month != timestamp '2002-04-01 01:02:03' + and ts + interval '1-2' year to month > timestamp '2002-02-01 01:02:03' + and ts + interval '1-2' year to month < timestamp '2002-04-01 01:02:03' + + and ts = ts + interval '0' year + and ts != ts + interval '1' year + and ts <= ts + interval '1' year + and ts < ts + interval '1' year + and ts >= ts - 
interval '1' year + and ts > ts - interval '1' year +order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +2001-01-01 01:02:03 +PREHOOK: query: -- day to second expressions in predicate +explain +select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' != dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' <= dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' < dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' >= dt - interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' > dt - interval '0 1:2:4' day to second + + and dt + interval '0 1:2:3' day to second = timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second != timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:3' day to second >= timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second > timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:3' day to second <= timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:4' day to second < timestamp '2001-01-01 01:02:03' + + and ts = dt + interval '0 1:2:3' day to second + and ts != dt + interval '0 1:2:4' day to second + and ts <= dt + interval '0 1:2:3' day to second + and ts < dt + interval '0 1:2:4' day to second + and ts >= dt - interval '0 1:2:3' day to second + and ts > dt - interval '0 1:2:4' day to second +order by ts +PREHOOK: type: QUERY +POSTHOOK: query: -- day to second expressions in predicate +explain +select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' != dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' <= dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' < dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' >= dt - interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' > dt - interval '0 1:2:4' day to second + + and dt + interval '0 1:2:3' day to second = timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second != timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:3' day to second >= timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second > timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:3' day to second <= timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:4' day to second < timestamp '2001-01-01 01:02:03' + + and ts = dt + interval '0 1:2:3' day to second + and ts != dt + interval '0 1:2:4' day to second + and ts <= dt + interval '0 1:2:3' day to second + and ts < dt + interval '0 1:2:4' day to second + and ts >= dt - interval '0 1:2:3' day to second + and ts > dt - interval '0 1:2:4' day to second +order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((((((((((((2001-01-01 01:02:03.0 = (dt + 0 01:02:03.000000000)) and (2001-01-01 01:02:03.0 <> (dt + 0 01:02:04.000000000))) and (2001-01-01 01:02:03.0 <= (dt + 0 01:02:03.000000000))) and (2001-01-01 01:02:03.0 < (dt + 0 01:02:04.000000000))) and (2001-01-01 01:02:03.0 >= (dt - 0 
01:02:03.000000000))) and (2001-01-01 01:02:03.0 > (dt - 0 01:02:04.000000000))) and ((dt + 0 01:02:03.000000000) = 2001-01-01 01:02:03.0)) and ((dt + 0 01:02:04.000000000) <> 2001-01-01 01:02:03.0)) and ((dt + 0 01:02:03.000000000) >= 2001-01-01 01:02:03.0)) and ((dt + 0 01:02:04.000000000) > 2001-01-01 01:02:03.0)) and ((dt - 0 01:02:03.000000000) <= 2001-01-01 01:02:03.0)) and ((dt - 0 01:02:04.000000000) < 2001-01-01 01:02:03.0)) and (ts = (dt + 0 01:02:03.000000000))) and (ts <> (dt + 0 01:02:04.000000000))) and (ts <= (dt + 0 01:02:03.000000000))) and (ts < (dt + 0 01:02:04.000000000))) and (ts >= (dt - 0 01:02:03.000000000))) and (ts > (dt - 0 01:02:04.000000000))) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' != dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' <= dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' < dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' >= dt - interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' > dt - interval '0 1:2:4' day to second + + and dt + interval '0 1:2:3' day to second = timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second != timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:3' day to second >= timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second > timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:3' day to second <= timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:4' day to second < timestamp '2001-01-01 01:02:03' + + and ts = dt + interval '0 1:2:3' day to second + and ts != dt + interval '0 1:2:4' day to second + and ts <= dt + interval '0 1:2:3' day to second + and ts < dt + interval '0 1:2:4' day to second + and ts >= dt - interval '0 1:2:3' day to second + and ts > dt - interval '0 1:2:4' day to second +order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' != dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' <= dt + interval '0 1:2:3' day to second + and timestamp '2001-01-01 01:02:03' < dt + interval '0 1:2:4' day to second + and timestamp '2001-01-01 01:02:03' >= dt - interval '0 
1:2:3' day to second + and timestamp '2001-01-01 01:02:03' > dt - interval '0 1:2:4' day to second + + and dt + interval '0 1:2:3' day to second = timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second != timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:3' day to second >= timestamp '2001-01-01 01:02:03' + and dt + interval '0 1:2:4' day to second > timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:3' day to second <= timestamp '2001-01-01 01:02:03' + and dt - interval '0 1:2:4' day to second < timestamp '2001-01-01 01:02:03' + + and ts = dt + interval '0 1:2:3' day to second + and ts != dt + interval '0 1:2:4' day to second + and ts <= dt + interval '0 1:2:3' day to second + and ts < dt + interval '0 1:2:4' day to second + and ts >= dt - interval '0 1:2:3' day to second + and ts > dt - interval '0 1:2:4' day to second +order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +2001-01-01 01:02:03 +PREHOOK: query: explain +select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = ts + interval '0' day + and timestamp '2001-01-01 01:02:03' != ts + interval '1' day + and timestamp '2001-01-01 01:02:03' <= ts + interval '1' day + and timestamp '2001-01-01 01:02:03' < ts + interval '1' day + and timestamp '2001-01-01 01:02:03' >= ts - interval '1' day + and timestamp '2001-01-01 01:02:03' > ts - interval '1' day + + and ts + interval '0' day = timestamp '2001-01-01 01:02:03' + and ts + interval '1' day != timestamp '2001-01-01 01:02:03' + and ts + interval '1' day >= timestamp '2001-01-01 01:02:03' + and ts + interval '1' day > timestamp '2001-01-01 01:02:03' + and ts - interval '1' day <= timestamp '2001-01-01 01:02:03' + and ts - interval '1' day < timestamp '2001-01-01 01:02:03' + + and ts = ts + interval '0' day + and ts != ts + interval '1' day + and ts <= ts + interval '1' day + and ts < ts + interval '1' day + and ts >= ts - interval '1' day + and ts > ts - interval '1' day +order by ts +PREHOOK: type: QUERY +POSTHOOK: query: explain +select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = ts + interval '0' day + and timestamp '2001-01-01 01:02:03' != ts + interval '1' day + and timestamp '2001-01-01 01:02:03' <= ts + interval '1' day + and timestamp '2001-01-01 01:02:03' < ts + interval '1' day + and timestamp '2001-01-01 01:02:03' >= ts - interval '1' day + and timestamp '2001-01-01 01:02:03' > ts - interval '1' day + + and ts + interval '0' day = timestamp '2001-01-01 01:02:03' + and ts + interval '1' day != timestamp '2001-01-01 01:02:03' + and ts + interval '1' day >= timestamp '2001-01-01 01:02:03' + and ts + interval '1' day > timestamp '2001-01-01 01:02:03' + and ts - interval '1' day <= timestamp '2001-01-01 01:02:03' + and ts - interval '1' day < timestamp '2001-01-01 01:02:03' + + and ts = ts + interval '0' day + and ts != ts + interval '1' day + and ts <= ts + interval '1' day + and ts < ts + interval '1' day + and ts >= ts - interval '1' day + and ts > ts - interval '1' day +order by ts +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: vector_interval_2 + Statistics: Num rows: 2 Data size: 788 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: ((((((((((((((((((2001-01-01 01:02:03.0 = (ts + 0 00:00:00.000000000)) and (2001-01-01 01:02:03.0 <> (ts + 1 00:00:00.000000000))) and 
(2001-01-01 01:02:03.0 <= (ts + 1 00:00:00.000000000))) and (2001-01-01 01:02:03.0 < (ts + 1 00:00:00.000000000))) and (2001-01-01 01:02:03.0 >= (ts - 1 00:00:00.000000000))) and (2001-01-01 01:02:03.0 > (ts - 1 00:00:00.000000000))) and ((ts + 0 00:00:00.000000000) = 2001-01-01 01:02:03.0)) and ((ts + 1 00:00:00.000000000) <> 2001-01-01 01:02:03.0)) and ((ts + 1 00:00:00.000000000) >= 2001-01-01 01:02:03.0)) and ((ts + 1 00:00:00.000000000) > 2001-01-01 01:02:03.0)) and ((ts - 1 00:00:00.000000000) <= 2001-01-01 01:02:03.0)) and ((ts - 1 00:00:00.000000000) < 2001-01-01 01:02:03.0)) and (ts = (ts + 0 00:00:00.000000000))) and (ts <> (ts + 1 00:00:00.000000000))) and (ts <= (ts + 1 00:00:00.000000000))) and (ts < (ts + 1 00:00:00.000000000))) and (ts >= (ts - 1 00:00:00.000000000))) and (ts > (ts - 1 00:00:00.000000000))) (type: boolean) + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Select Operator + expressions: ts (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: timestamp) + sort order: + + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: timestamp) + outputColumnNames: _col0 + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = ts + interval '0' day + and timestamp '2001-01-01 01:02:03' != ts + interval '1' day + and timestamp '2001-01-01 01:02:03' <= ts + interval '1' day + and timestamp '2001-01-01 01:02:03' < ts + interval '1' day + and timestamp '2001-01-01 01:02:03' >= ts - interval '1' day + and timestamp '2001-01-01 01:02:03' > ts - interval '1' day + + and ts + interval '0' day = timestamp '2001-01-01 01:02:03' + and ts + interval '1' day != timestamp '2001-01-01 01:02:03' + and ts + interval '1' day >= timestamp '2001-01-01 01:02:03' + and ts + interval '1' day > timestamp '2001-01-01 01:02:03' + and ts - interval '1' day <= timestamp '2001-01-01 01:02:03' + and ts - interval '1' day < timestamp '2001-01-01 01:02:03' + + and ts = ts + interval '0' day + and ts != ts + interval '1' day + and ts <= ts + interval '1' day + and ts < ts + interval '1' day + and ts >= ts - interval '1' day + and ts > ts - interval '1' day +order by ts +PREHOOK: type: QUERY +PREHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +POSTHOOK: query: select ts from vector_interval_2 +where + timestamp '2001-01-01 01:02:03' = ts + interval '0' day + and timestamp '2001-01-01 01:02:03' != ts + interval '1' day + and timestamp '2001-01-01 01:02:03' <= ts + interval '1' day + and timestamp '2001-01-01 01:02:03' < ts + interval '1' day + and timestamp '2001-01-01 01:02:03' >= ts - interval '1' day + and timestamp '2001-01-01 01:02:03' > ts - interval '1' day + + and ts + interval '0' day = timestamp '2001-01-01 01:02:03' + and ts + interval '1' day != timestamp '2001-01-01 01:02:03' + and 
ts + interval '1' day >= timestamp '2001-01-01 01:02:03' + and ts + interval '1' day > timestamp '2001-01-01 01:02:03' + and ts - interval '1' day <= timestamp '2001-01-01 01:02:03' + and ts - interval '1' day < timestamp '2001-01-01 01:02:03' + + and ts = ts + interval '0' day + and ts != ts + interval '1' day + and ts <= ts + interval '1' day + and ts < ts + interval '1' day + and ts >= ts - interval '1' day + and ts > ts - interval '1' day +order by ts +POSTHOOK: type: QUERY +POSTHOOK: Input: default@vector_interval_2 +#### A masked pattern was here #### +2001-01-01 01:02:03 +PREHOOK: query: drop table vector_interval_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@vector_interval_2 +PREHOOK: Output: default@vector_interval_2 +POSTHOOK: query: drop table vector_interval_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@vector_interval_2 +POSTHOOK: Output: default@vector_interval_2
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
index 2b0b550..bee2340 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
@@ -134,11 +134,15 @@ public static long daysToMillis(int d) {
     return millisUtc - LOCAL_TIMEZONE.get().getOffset(tmp);
   }
 
+  public static int millisToDays(long millisLocal) {
+    long millisUtc = millisLocal + LOCAL_TIMEZONE.get().getOffset(millisLocal);
+    return (int)(millisUtc / MILLIS_PER_DAY);
+  }
+
   public static int dateToDays(Date d) {
     // convert to equivalent time in UTC, then get day offset
     long millisLocal = d.getTime();
-    long millisUtc = millisLocal + LOCAL_TIMEZONE.get().getOffset(millisLocal);
-    return (int)(millisUtc / MILLIS_PER_DAY);
+    return millisToDays(millisLocal);
   }
 
   public void setFromBytes(byte[] bytes, int offset, int length, VInt vInt) {
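
For context on the DateWritable hunk above: the new millisToDays() is the counterpart of the existing daysToMillis(), shifting epoch milliseconds in local time over to UTC before dividing down to a day number. Below is a minimal standalone sketch of that conversion; it is illustrative only, using java.util.TimeZone.getDefault() in place of Hive's LOCAL_TIMEZONE thread-local, and the class name MillisToDaysSketch is an invention of this example, not part of the patch.

```java
import java.util.Date;
import java.util.TimeZone;

// Standalone sketch of the day<->millisecond conversion that the patch
// factors out of DateWritable.dateToDays() into millisToDays().
// TimeZone.getDefault() stands in for Hive's LOCAL_TIMEZONE thread-local.
public class MillisToDaysSketch {

  private static final long MILLIS_PER_DAY = 24L * 60 * 60 * 1000;
  private static final TimeZone LOCAL_TIMEZONE = TimeZone.getDefault();

  // Mirrors the new DateWritable.millisToDays(): add the zone offset at
  // that instant to move local-time millis to UTC, then divide down to a
  // day offset from the epoch.
  static int millisToDays(long millisLocal) {
    long millisUtc = millisLocal + LOCAL_TIMEZONE.getOffset(millisLocal);
    return (int) (millisUtc / MILLIS_PER_DAY);
  }

  // Mirrors the refactored DateWritable.dateToDays(), which now simply
  // delegates to millisToDays() on the Date's epoch millis.
  static int dateToDays(Date d) {
    return millisToDays(d.getTime());
  }

  public static void main(String[] args) {
    // The same day number should result whether we start from a Date
    // object or from its raw epoch millis.
    Date d = new Date(101, 0, 1); // 2001-01-01 local time; deprecated ctor, fine for a sketch
    System.out.println(dateToDays(d) == millisToDays(d.getTime())); // true
  }
}
```

Exposing the millis-based entry point separately, rather than leaving the arithmetic inline in dateToDays(), presumably lets the vectorized date/interval expressions elsewhere in this patch perform the conversion directly on primitive long values without allocating a java.util.Date per row.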