diff --git ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
index efccba0..32372e5 100644
--- ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
+++ ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
@@ -232,29 +232,98 @@
       {"FilterScalarCompareColumn", "GreaterEqual", "long", "long", ">="},
       {"FilterScalarCompareColumn", "GreaterEqual", "double", "long", ">="},
 
-      {"FilterStringColumnCompareScalar", "Equal", "=="},
-      {"FilterStringColumnCompareScalar", "NotEqual", "!="},
-      {"FilterStringColumnCompareScalar", "Less", "<"},
-      {"FilterStringColumnCompareScalar", "LessEqual", "<="},
-      {"FilterStringColumnCompareScalar", "Greater", ">"},
-      {"FilterStringColumnCompareScalar", "GreaterEqual", ">="},
+      {"FilterStringGroupColumnCompareStringGroupScalarBase", "Equal", "=="},
+      {"FilterStringGroupColumnCompareStringGroupScalarBase", "NotEqual", "!="},
+      {"FilterStringGroupColumnCompareStringGroupScalarBase", "Less", "<"},
+      {"FilterStringGroupColumnCompareStringGroupScalarBase", "LessEqual", "<="},
+      {"FilterStringGroupColumnCompareStringGroupScalarBase", "Greater", ">"},
+      {"FilterStringGroupColumnCompareStringGroupScalarBase", "GreaterEqual", ">="},
+
+      {"FilterStringGroupColumnCompareStringScalar", "Equal", "=="},
+      {"FilterStringGroupColumnCompareStringScalar", "NotEqual", "!="},
+      {"FilterStringGroupColumnCompareStringScalar", "Less", "<"},
+      {"FilterStringGroupColumnCompareStringScalar", "LessEqual", "<="},
+      {"FilterStringGroupColumnCompareStringScalar", "Greater", ">"},
+      {"FilterStringGroupColumnCompareStringScalar", "GreaterEqual", ">="},
+
+      {"FilterStringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "Equal", "=="},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "NotEqual", "!="},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "Less", "<"},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "LessEqual", "<="},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "Greater", ">"},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "GreaterEqual", ">="},
+
+      {"FilterStringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "Equal", "=="},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "NotEqual", "!="},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "Less", "<"},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "LessEqual", "<="},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "Greater", ">"},
+      {"FilterStringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "GreaterEqual", ">="},
 
       {"FilterStringColumnBetween", ""},
       {"FilterStringColumnBetween", "!"},
 
-      {"StringColumnCompareScalar", "Equal", "=="},
-      {"StringColumnCompareScalar", "NotEqual", "!="},
-      {"StringColumnCompareScalar", "Less", "<"},
-      {"StringColumnCompareScalar", "LessEqual", "<="},
-      {"StringColumnCompareScalar", "Greater", ">"},
-      {"StringColumnCompareScalar", "GreaterEqual", ">="},
-
-      {"FilterStringScalarCompareColumn", "Equal", "=="},
-      {"FilterStringScalarCompareColumn", "NotEqual", "!="},
-      {"FilterStringScalarCompareColumn", "Less", "<"},
-      {"FilterStringScalarCompareColumn", "LessEqual", "<="},
-      {"FilterStringScalarCompareColumn", "Greater", ">"},
-      {"FilterStringScalarCompareColumn", "GreaterEqual", ">="},
+      {"FilterTruncStringColumnBetween", "VarChar", "HiveVarchar", ""},
{"FilterTruncStringColumnBetween", "VarChar", "HiveVarchar", "!"}, + + {"FilterTruncStringColumnBetween", "Char", "HiveChar", ""}, + {"FilterTruncStringColumnBetween", "Char", "HiveChar", "!"}, + + {"StringGroupColumnCompareStringGroupScalarBase", "Equal", "=="}, + {"StringGroupColumnCompareStringGroupScalarBase", "NotEqual", "!="}, + {"StringGroupColumnCompareStringGroupScalarBase", "Less", "<"}, + {"StringGroupColumnCompareStringGroupScalarBase", "LessEqual", "<="}, + {"StringGroupColumnCompareStringGroupScalarBase", "Greater", ">"}, + {"StringGroupColumnCompareStringGroupScalarBase", "GreaterEqual", ">="}, + + {"StringGroupColumnCompareStringScalar", "Equal", "=="}, + {"StringGroupColumnCompareStringScalar", "NotEqual", "!="}, + {"StringGroupColumnCompareStringScalar", "Less", "<"}, + {"StringGroupColumnCompareStringScalar", "LessEqual", "<="}, + {"StringGroupColumnCompareStringScalar", "Greater", ">"}, + {"StringGroupColumnCompareStringScalar", "GreaterEqual", ">="}, + + {"StringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "Equal", "=="}, + {"StringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "NotEqual", "!="}, + {"StringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "Less", "<"}, + {"StringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "LessEqual", "<="}, + {"StringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "Greater", ">"}, + {"StringGroupColumnCompareTruncStringScalar", "VarChar", "HiveVarchar", "GreaterEqual", ">="}, + + {"StringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "Equal", "=="}, + {"StringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "NotEqual", "!="}, + {"StringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "Less", "<"}, + {"StringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "LessEqual", "<="}, + {"StringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "Greater", ">"}, + {"StringGroupColumnCompareTruncStringScalar", "Char", "HiveChar", "GreaterEqual", ">="}, + + {"FilterStringGroupScalarCompareStringGroupColumnBase", "Equal", "=="}, + {"FilterStringGroupScalarCompareStringGroupColumnBase", "NotEqual", "!="}, + {"FilterStringGroupScalarCompareStringGroupColumnBase", "Less", "<"}, + {"FilterStringGroupScalarCompareStringGroupColumnBase", "LessEqual", "<="}, + {"FilterStringGroupScalarCompareStringGroupColumnBase", "Greater", ">"}, + {"FilterStringGroupScalarCompareStringGroupColumnBase", "GreaterEqual", ">="}, + + {"FilterStringScalarCompareStringGroupColumn", "Equal", "=="}, + {"FilterStringScalarCompareStringGroupColumn", "NotEqual", "!="}, + {"FilterStringScalarCompareStringGroupColumn", "Less", "<"}, + {"FilterStringScalarCompareStringGroupColumn", "LessEqual", "<="}, + {"FilterStringScalarCompareStringGroupColumn", "Greater", ">"}, + {"FilterStringScalarCompareStringGroupColumn", "GreaterEqual", ">="}, + + {"FilterTruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "Equal", "=="}, + {"FilterTruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "NotEqual", "!="}, + {"FilterTruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "Less", "<"}, + {"FilterTruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "LessEqual", "<="}, + {"FilterTruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "Greater", ">"}, + {"FilterTruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "GreaterEqual", ">="}, + + 
{"FilterTruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "Equal", "=="}, + {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "NotEqual", "!="}, + {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "Less", "<"}, + {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "LessEqual", "<="}, + {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "Greater", ">"}, + {"FilterTruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "GreaterEqual", ">="}, {"FilterDecimalColumnCompareScalar", "Equal", "=="}, {"FilterDecimalColumnCompareScalar", "NotEqual", "!="}, @@ -277,26 +346,47 @@ {"FilterDecimalColumnCompareColumn", "Greater", ">"}, {"FilterDecimalColumnCompareColumn", "GreaterEqual", ">="}, - {"StringScalarCompareColumn", "Equal", "=="}, - {"StringScalarCompareColumn", "NotEqual", "!="}, - {"StringScalarCompareColumn", "Less", "<"}, - {"StringScalarCompareColumn", "LessEqual", "<="}, - {"StringScalarCompareColumn", "Greater", ">"}, - {"StringScalarCompareColumn", "GreaterEqual", ">="}, - - {"FilterStringColumnCompareColumn", "Equal", "=="}, - {"FilterStringColumnCompareColumn", "NotEqual", "!="}, - {"FilterStringColumnCompareColumn", "Less", "<"}, - {"FilterStringColumnCompareColumn", "LessEqual", "<="}, - {"FilterStringColumnCompareColumn", "Greater", ">"}, - {"FilterStringColumnCompareColumn", "GreaterEqual", ">="}, - - {"StringColumnCompareColumn", "Equal", "=="}, - {"StringColumnCompareColumn", "NotEqual", "!="}, - {"StringColumnCompareColumn", "Less", "<"}, - {"StringColumnCompareColumn", "LessEqual", "<="}, - {"StringColumnCompareColumn", "Greater", ">"}, - {"StringColumnCompareColumn", "GreaterEqual", ">="}, + {"StringGroupScalarCompareStringGroupColumnBase", "Equal", "=="}, + {"StringGroupScalarCompareStringGroupColumnBase", "NotEqual", "!="}, + {"StringGroupScalarCompareStringGroupColumnBase", "Less", "<"}, + {"StringGroupScalarCompareStringGroupColumnBase", "LessEqual", "<="}, + {"StringGroupScalarCompareStringGroupColumnBase", "Greater", ">"}, + {"StringGroupScalarCompareStringGroupColumnBase", "GreaterEqual", ">="}, + + {"StringScalarCompareStringGroupColumn", "Equal", "=="}, + {"StringScalarCompareStringGroupColumn", "NotEqual", "!="}, + {"StringScalarCompareStringGroupColumn", "Less", "<"}, + {"StringScalarCompareStringGroupColumn", "LessEqual", "<="}, + {"StringScalarCompareStringGroupColumn", "Greater", ">"}, + {"StringScalarCompareStringGroupColumn", "GreaterEqual", ">="}, + + {"TruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "Equal", "=="}, + {"TruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "NotEqual", "!="}, + {"TruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "Less", "<"}, + {"TruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "LessEqual", "<="}, + {"TruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "Greater", ">"}, + {"TruncStringScalarCompareStringGroupColumn", "VarChar", "HiveVarchar", "GreaterEqual", ">="}, + + {"TruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "Equal", "=="}, + {"TruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "NotEqual", "!="}, + {"TruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "Less", "<"}, + {"TruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "LessEqual", "<="}, + {"TruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "Greater", ">"}, + 
{"TruncStringScalarCompareStringGroupColumn", "Char", "HiveChar", "GreaterEqual", ">="}, + + {"FilterStringGroupColumnCompareStringGroupColumn", "Equal", "=="}, + {"FilterStringGroupColumnCompareStringGroupColumn", "NotEqual", "!="}, + {"FilterStringGroupColumnCompareStringGroupColumn", "Less", "<"}, + {"FilterStringGroupColumnCompareStringGroupColumn", "LessEqual", "<="}, + {"FilterStringGroupColumnCompareStringGroupColumn", "Greater", ">"}, + {"FilterStringGroupColumnCompareStringGroupColumn", "GreaterEqual", ">="}, + + {"StringGroupColumnCompareStringGroupColumn", "Equal", "=="}, + {"StringGroupColumnCompareStringGroupColumn", "NotEqual", "!="}, + {"StringGroupColumnCompareStringGroupColumn", "Less", "<"}, + {"StringGroupColumnCompareStringGroupColumn", "LessEqual", "<="}, + {"StringGroupColumnCompareStringGroupColumn", "Greater", ">"}, + {"StringGroupColumnCompareStringGroupColumn", "GreaterEqual", ">="}, {"FilterColumnCompareColumn", "Equal", "long", "double", "=="}, {"FilterColumnCompareColumn", "Equal", "double", "double", "=="}, @@ -658,22 +748,40 @@ private void generate() throws Exception { generateVectorUDAFVar(tdesc); } else if (tdesc[0].equals("VectorUDAFVarDecimal")) { generateVectorUDAFVarDecimal(tdesc); - } else if (tdesc[0].equals("FilterStringColumnCompareScalar")) { - generateFilterStringColumnCompareScalar(tdesc); + } else if (tdesc[0].equals("FilterStringGroupColumnCompareStringGroupScalarBase")) { + generateFilterStringGroupColumnCompareStringGroupScalarBase(tdesc); + } else if (tdesc[0].equals("FilterStringGroupColumnCompareStringScalar")) { + generateFilterStringGroupColumnCompareStringScalar(tdesc); + } else if (tdesc[0].equals("FilterStringGroupColumnCompareTruncStringScalar")) { + generateFilterStringGroupColumnCompareTruncStringScalar(tdesc); } else if (tdesc[0].equals("FilterStringColumnBetween")) { generateFilterStringColumnBetween(tdesc); + } else if (tdesc[0].equals("FilterTruncStringColumnBetween")) { + generateFilterTruncStringColumnBetween(tdesc); } else if (tdesc[0].equals("FilterDecimalColumnBetween")) { generateFilterDecimalColumnBetween(tdesc); - } else if (tdesc[0].equals("StringColumnCompareScalar")) { - generateStringColumnCompareScalar(tdesc); - } else if (tdesc[0].equals("FilterStringScalarCompareColumn")) { - generateFilterStringScalarCompareColumn(tdesc); - } else if (tdesc[0].equals("StringScalarCompareColumn")) { - generateStringScalarCompareColumn(tdesc); - } else if (tdesc[0].equals("FilterStringColumnCompareColumn")) { - generateFilterStringColumnCompareColumn(tdesc); - } else if (tdesc[0].equals("StringColumnCompareColumn")) { - generateStringColumnCompareColumn(tdesc); + } else if (tdesc[0].equals("StringGroupColumnCompareStringGroupScalarBase")) { + generateStringGroupColumnCompareStringGroupScalarBase(tdesc); + } else if (tdesc[0].equals("StringGroupColumnCompareStringScalar")) { + generateStringGroupColumnCompareStringScalar(tdesc); + } else if (tdesc[0].equals("StringGroupColumnCompareTruncStringScalar")) { + generateStringGroupColumnCompareTruncStringScalar(tdesc); + } else if (tdesc[0].equals("FilterStringGroupScalarCompareStringGroupColumnBase")) { + generateFilterStringGroupScalarCompareStringGroupColumnBase(tdesc); + } else if (tdesc[0].equals("FilterStringScalarCompareStringGroupColumn")) { + generateFilterStringScalarCompareStringGroupColumn(tdesc); + } else if (tdesc[0].equals("FilterTruncStringScalarCompareStringGroupColumn")) { + generateFilterTruncStringScalarCompareStringGroupColumn(tdesc); + } else if 
+    } else if (tdesc[0].equals("StringGroupScalarCompareStringGroupColumnBase")) {
+      generateStringGroupScalarCompareStringGroupColumnBase(tdesc);
+    } else if (tdesc[0].equals("StringScalarCompareStringGroupColumn")) {
+      generateStringScalarCompareStringGroupColumn(tdesc);
+    } else if (tdesc[0].equals("TruncStringScalarCompareStringGroupColumn")) {
+      generateTruncStringScalarCompareStringGroupColumn(tdesc);
+    } else if (tdesc[0].equals("FilterStringGroupColumnCompareStringGroupColumn")) {
+      generateFilterStringGroupColumnCompareStringGroupColumn(tdesc);
+    } else if (tdesc[0].equals("StringGroupColumnCompareStringGroupColumn")) {
+      generateStringGroupColumnCompareStringGroupColumn(tdesc);
     } else if (tdesc[0].equals("IfExprColumnColumn")) {
       generateIfExprColumnColumn(tdesc);
     } else if (tdesc[0].equals("IfExprColumnScalar")) {
@@ -710,6 +818,24 @@ private void generateFilterStringColumnBetween(String[] tdesc) throws IOExceptio
         className, templateString);
   }
 
+  private void generateFilterTruncStringColumnBetween(String[] tdesc) throws IOException {
+    String truncStringTypeName = tdesc[1];
+    String truncStringHiveType = tdesc[2];
+    String optionalNot = tdesc[3];
+    String className = "Filter" + truncStringTypeName + "Column" + (optionalNot.equals("!") ? "Not" : "")
+        + "Between";
+    // Read the template into a string, expand it, and write it.
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    templateString = templateString.replaceAll("<TruncStringTypeName>", truncStringTypeName);
+    templateString = templateString.replaceAll("<TruncStringHiveType>", truncStringHiveType);
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<OptionalNot>", optionalNot);
+
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
   private void generateFilterDecimalColumnBetween(String[] tdesc) throws IOException {
     String optionalNot = tdesc[1];
     String className = "FilterDecimalColumn" + (optionalNot.equals("!") ? "Not" : "")
@@ -887,44 +1013,147 @@ private void generateVectorUDAFVarDecimal(String[] tdesc) throws Exception {
         className, templateString);
   }
-
-  private void generateFilterStringScalarCompareColumn(String[] tdesc) throws IOException {
+  private void generateFilterStringGroupScalarCompareStringGroupColumnBase(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
-    String className = "FilterStringScalar" + operatorName + "StringColumn";
+    String className = "FilterStringGroupScalar" + operatorName + "StringGroupColumnBase";
     // Template expansion logic is the same for both column-scalar and scalar-column cases.
     generateStringColumnCompareScalar(tdesc, className);
   }
+
+  private void generateFilterStringScalarCompareStringGroupColumn(String[] tdesc) throws IOException {
+    String operatorName = tdesc[1];
+    String className = "FilterStringScalar" + operatorName + "StringGroupColumn";
+    String baseClassName = "FilterStringGroupScalar" + operatorName + "StringGroupColumnBase";
+    String operatorSymbol = tdesc[2];
+    // Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
 
-  private void generateStringScalarCompareColumn(String[] tdesc) throws IOException {
+  private void generateFilterTruncStringScalarCompareStringGroupColumn(String[] tdesc) throws IOException {
+    String truncStringTypeName = tdesc[1];
+    String operatorName = tdesc[3];
+    String className = "Filter" + truncStringTypeName + "Scalar" + operatorName + "StringGroupColumn";
+    String baseClassName = "FilterStringGroupScalar" + operatorName + "StringGroupColumnBase";
+    generateStringCompareTruncStringScalar(tdesc, className, baseClassName);
+  }
+
+  private void generateStringGroupScalarCompareStringGroupColumnBase(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
-    String className = "StringScalar" + operatorName + "StringColumn";
+    String className = "StringGroupScalar" + operatorName + "StringGroupColumnBase";
     // Template expansion logic is the same for both column-scalar and scalar-column cases.
     generateStringColumnCompareScalar(tdesc, className);
   }
 
-  private void generateFilterStringColumnCompareScalar(String[] tdesc) throws IOException {
+  private void generateStringScalarCompareStringGroupColumn(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
-    String className = "FilterStringCol" + operatorName + "StringScalar";
-    generateStringColumnCompareScalar(tdesc, className);
+    String className = "StringScalar" + operatorName + "StringGroupColumn";
+    String baseClassName = "StringGroupScalar" + operatorName + "StringGroupColumnBase";
+    String operatorSymbol = tdesc[2];
+    // Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
   }
+
+  private void generateTruncStringScalarCompareStringGroupColumn(String[] tdesc) throws IOException {
+    String truncStringTypeName = tdesc[1];
+    String operatorName = tdesc[3];
+    String className = truncStringTypeName + "Scalar" + operatorName + "StringGroupColumn";
+    String baseClassName = "StringGroupScalar" + operatorName + "StringGroupColumnBase";
+    generateStringCompareTruncStringScalar(tdesc, className, baseClassName);
   }
 
-  private void generateStringColumnCompareScalar(String[] tdesc) throws IOException {
+  private void generateFilterStringGroupColumnCompareStringGroupScalarBase(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
-    String className = "StringCol" + operatorName + "StringScalar";
+    String className = "FilterStringGroupCol" + operatorName + "StringGroupScalarBase";
     generateStringColumnCompareScalar(tdesc, className);
   }
 
-  private void generateFilterStringColumnCompareColumn(String[] tdesc) throws IOException {
+  private void generateFilterStringGroupColumnCompareStringScalar(String[] tdesc) throws IOException {
+    String operatorName = tdesc[1];
+    String className = "FilterStringGroupCol" + operatorName + "StringScalar";
+    String baseClassName = "FilterStringGroupCol" + operatorName + "StringGroupScalarBase";
+    String operatorSymbol = tdesc[2];
+    // Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateFilterStringGroupColumnCompareTruncStringScalar(String[] tdesc) throws IOException {
+    String truncStringTypeName = tdesc[1];
+    String truncStringHiveType = tdesc[2];
+    String operatorName = tdesc[3];
+    String className = "FilterStringGroupCol" + operatorName + truncStringTypeName + "Scalar";
+    String baseClassName = "FilterStringGroupCol" + operatorName + "StringGroupScalarBase";
+    generateStringCompareTruncStringScalar(tdesc, className, baseClassName);
+  }
+
+  private void generateStringGroupColumnCompareStringGroupScalarBase(String[] tdesc) throws IOException {
+    String operatorName = tdesc[1];
+    String className = "StringGroupCol" + operatorName + "StringGroupScalarBase";
+    String operatorSymbol = tdesc[2];
+    // Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateStringGroupColumnCompareStringScalar(String[] tdesc) throws IOException {
+    String operatorName = tdesc[1];
+    String className = "StringGroupCol" + operatorName + "StringScalar";
+    String baseClassName = "StringGroupCol" + operatorName + "StringGroupScalarBase";
+    String operatorSymbol = tdesc[2];
+    // Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
+  private void generateStringGroupColumnCompareTruncStringScalar(String[] tdesc) throws IOException {
+    String truncStringTypeName = tdesc[1];
+    String operatorName = tdesc[3];
+    String className = "StringGroupCol" + operatorName + truncStringTypeName + "Scalar";
+    String baseClassName = "StringGroupCol" + operatorName + "StringGroupScalarBase";
+    generateStringCompareTruncStringScalar(tdesc, className, baseClassName);
+  }
+
+  private void generateFilterStringGroupColumnCompareStringGroupColumn(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
-    String className = "FilterStringCol" + operatorName + "StringColumn";
+    String className = "FilterStringGroupCol" + operatorName + "StringGroupColumn";
     generateStringColumnCompareScalar(tdesc, className);
   }
 
-  private void generateStringColumnCompareColumn(String[] tdesc) throws IOException {
+  private void generateStringGroupColumnCompareStringGroupColumn(String[] tdesc) throws IOException {
     String operatorName = tdesc[1];
-    String className = "StringCol" + operatorName + "StringColumn";
+    String className = "StringGroupCol" + operatorName + "StringGroupColumn";
     generateStringColumnCompareScalar(tdesc, className);
   }
 
@@ -941,6 +1170,24 @@ private void generateStringColumnCompareScalar(String[] tdesc, String className)
         className, templateString);
   }
 
+  private void generateStringCompareTruncStringScalar(String[] tdesc, String className, String baseClassName)
+      throws IOException {
+    String truncStringTypeName = tdesc[1];
+    String truncStringHiveType = tdesc[2];
+    String operatorSymbol = tdesc[4];
+    // Read the template into a string;
+    File templateFile = new File(joinPath(this.expressionTemplateDirectory, tdesc[0] + ".txt"));
+    String templateString = readFile(templateFile);
+    // Expand, and write result
+    templateString = templateString.replaceAll("<ClassName>", className);
+    templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
+    templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
+    templateString = templateString.replaceAll("<TruncStringTypeName>", truncStringTypeName);
+    templateString = templateString.replaceAll("<TruncStringHiveType>", truncStringHiveType);
+    writeFile(templateFile.lastModified(), expressionOutputDirectory, expressionClassesDirectory,
+        className, templateString);
+  }
+
   private void generateFilterColumnCompareColumn(String[] tdesc) throws Exception {
     //The variables are all same as ColumnCompareScalar except that
     //this template doesn't need a return type. Pass anything as return type.
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnBetween.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnBetween.txt
index 8b6e4b7..e8049da 100644
--- ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnBetween.txt
+++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnBetween.txt
@@ -184,9 +184,9 @@ public class <ClassName> extends VectorExpression {
             VectorExpressionDescriptor.Mode.FILTER)
         .setNumArguments(3)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("string"),
-            VectorExpressionDescriptor.ArgumentType.getType("string"),
-            VectorExpressionDescriptor.ArgumentType.getType("string"))
+            VectorExpressionDescriptor.ArgumentType.STRING,
+            VectorExpressionDescriptor.ArgumentType.STRING,
+            VectorExpressionDescriptor.ArgumentType.STRING)
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.SCALAR,
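(For orientation before the template files below: a minimal, self-contained sketch of the replaceAll()-based expansion that the generator methods above perform. The placeholder and class names are taken from this patch; the inlined template string and the TemplateExpansionSketch class are illustrative stand-ins for the real template files under ql/src/gen/vectorization/ExpressionTemplates.)

    // Illustrative only: mimics GenVectorCode's expansion of one (operator, symbol) row.
    public class TemplateExpansionSketch {
      public static void main(String[] args) {
        // Tiny stand-in for a template such as FilterStringGroupColumnCompareStringScalar.txt.
        String templateString =
            "public class <ClassName> extends <BaseClassName> {\n"
            + "  // row qualifies when: StringExpr.compare(...) <OperatorSymbol> 0\n"
            + "}\n";
        String operatorName = "Equal";   // tdesc[1]
        String operatorSymbol = "==";    // tdesc[2]
        String className = "FilterStringGroupCol" + operatorName + "StringScalar";
        String baseClassName = "FilterStringGroupCol" + operatorName + "StringGroupScalarBase";
        templateString = templateString.replaceAll("<ClassName>", className);
        templateString = templateString.replaceAll("<BaseClassName>", baseClassName);
        templateString = templateString.replaceAll("<OperatorSymbol>", operatorSymbol);
        System.out.print(templateString); // the expanded Java source that would be written out
      }
    }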
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareColumn.txt
deleted file mode 100644
index 1d8140f..0000000
--- ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareColumn.txt
+++ /dev/null
@@ -1,492 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/**
- * Filter the rows in a batch by comparing one string column to another.
- * This code is generated from a template.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum1;
-  private int colNum2;
-
-  public <ClassName>(int colNum1, int colNum2) {
-    this.colNum1 = colNum1;
-    this.colNum2 = colNum2;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    BytesColumnVector inputColVector1 = (BytesColumnVector) batch.cols[colNum1];
-    BytesColumnVector inputColVector2 = (BytesColumnVector) batch.cols[colNum2];
-    int[] sel = batch.selected;
-    boolean[] nullPos1 = inputColVector1.isNull;
-    boolean[] nullPos2 = inputColVector2.isNull;
-    int n = batch.size;
-    byte[][] vector1 = inputColVector1.vector;
-    byte[][] vector2 = inputColVector2.vector;
-    int[] start1 = inputColVector1.start;
-    int[] start2 = inputColVector2.start;
-    int[] length1 = inputColVector1.length;
-    int[] length2 = inputColVector2.length;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    // handle case where neither input has nulls
-    if (inputColVector1.noNulls && inputColVector2.noNulls) {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-
-        /* Either all must remain selected or all will be eliminated.
-         * Repeating property will not change.
-         */
-        if (!(StringExpr.compare(vector1[0], start1[0], length1[0],
-            vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0;
-        }
-      } else if (inputColVector1.isRepeating) {
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          if (StringExpr.compare(vector1[i], start1[i], length1[i],
-              vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (StringExpr.compare(vector1[i], start1[i], length1[i],
-              vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        if (newSize < batch.size) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-
-    // handle case where only input 2 has nulls
-    } else if (inputColVector1.noNulls) {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-        if (nullPos2[0] ||
-            !(StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0;
-        }
-      } else if (inputColVector1.isRepeating) {
-
-        // no need to check for nulls in input 1
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos2[i]) {
-              if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos2[i]) {
-              if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (nullPos2[0]) {
-
-          // no values will qualify because every comparison will be with NULL
-          batch.size = 0;
-          return;
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else { // neither input is repeating
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos2[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos2[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      }
-
-    // handle case where only input 1 has nulls
-    } else if (inputColVector2.noNulls) {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-        if (nullPos1[0] ||
-            !(StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0;
-          return;
-        }
-      } else if (inputColVector1.isRepeating) {
-        if (nullPos1[0]) {
-
-          // if repeating value is null then every comparison will fail so nothing qualifies
-          batch.size = 0;
-          return;
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else { // neither input is repeating
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      }
-
-    // handle case where both inputs have nulls
-    } else {
-      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-        if (nullPos1[0] || nullPos2[0] ||
-            !(StringExpr.compare(vector1[0], start1[0], length1[0],
-                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
-          batch.size = 0;
-        }
-      } else if (inputColVector1.isRepeating) {
-        if (nullPos1[0]) {
-          batch.size = 0;
-          return;
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos2[i]) {
-              if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos2[i]) {
-              if (StringExpr.compare(vector1[0], start1[0], length1[0],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else if (inputColVector2.isRepeating) {
-        if (nullPos2[0]) {
-          batch.size = 0;
-          return;
-        }
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      } else { // neither input is repeating
-        if (batch.selectedInUse) {
-          int newSize = 0;
-          for(int j = 0; j != n; j++) {
-            int i = sel[j];
-            if (!nullPos1[i] && !nullPos2[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          batch.size = newSize;
-        } else {
-          int newSize = 0;
-          for(int i = 0; i != n; i++) {
-            if (!nullPos1[i] && !nullPos2[i]) {
-              if (StringExpr.compare(vector1[i], start1[i], length1[i],
-                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
-                sel[newSize++] = i;
-              }
-            }
-          }
-          if (newSize < batch.size) {
-            batch.size = newSize;
-            batch.selectedInUse = true;
-          }
-        }
-      }
-    }
-  }
-
-  @Override
-  public String getOutputType() {
-    return "boolean";
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return -1;
-  }
-
-  public int getColNum1() {
-    return colNum1;
-  }
-
-  public void setColNum1(int colNum1) {
-    this.colNum1 = colNum1;
-  }
-
-  public int getColNum2() {
-    return colNum2;
-  }
-
-  public void setColNum2(int colNum2) {
-    this.colNum2 = colNum2;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("string"),
-            VectorExpressionDescriptor.ArgumentType.getType("string"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareScalar.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareScalar.txt
deleted file mode 100644
index cba51bc..0000000
--- ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnCompareScalar.txt
+++ /dev/null
@@ -1,176 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/**
- * This is a generated class to evaluate a comparison on a vector of strings.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private byte[] value;
-
-  public <ClassName>(int colNum, byte[] value) {
-    this.colNum = colNum;
-    this.value = value;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-    BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum];
-    int[] sel = batch.selected;
-    boolean[] nullPos = inputColVector.isNull;
-    int n = batch.size;
-    byte[][] vector = inputColVector.vector;
-    int[] length = inputColVector.length;
-    int[] start = inputColVector.start;
-
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    if (inputColVector.noNulls) {
-      if (inputColVector.isRepeating) {
-
-        // All must be selected otherwise size would be zero. Repeating property will not change.
-        if (!(StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) <OperatorSymbol> 0)) {
-
-          //Entire batch is filtered out.
-          batch.size = 0;
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j=0; j != n; j++) {
-          int i = sel[j];
-          if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
-            sel[newSize++] = i;
-          }
-        }
-        if (newSize < n) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-    } else {
-      if (inputColVector.isRepeating) {
-
-        // All must be selected otherwise size would be zero. Repeating property will not change.
-        if (!nullPos[0]) {
-          if (!(StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) <OperatorSymbol> 0)) {
-
-            //Entire batch is filtered out.
-            batch.size = 0;
-          }
-        } else {
-          batch.size = 0;
-        }
-      } else if (batch.selectedInUse) {
-        int newSize = 0;
-        for(int j=0; j != n; j++) {
-          int i = sel[j];
-          if (!nullPos[i]) {
-            if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-        }
-
-        //Change the selected vector
-        batch.size = newSize;
-      } else {
-        int newSize = 0;
-        for(int i = 0; i != n; i++) {
-          if (!nullPos[i]) {
-            if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
-              sel[newSize++] = i;
-            }
-          }
-        }
-        if (newSize < n) {
-          batch.size = newSize;
-          batch.selectedInUse = true;
-        }
-      }
-    }
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return -1;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "boolean";
-  }
-
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public byte[] getValue() {
-    return value;
-  }
-
-  public void setValue(byte[] value) {
-    this.value = value;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("string"),
-            VectorExpressionDescriptor.ArgumentType.getType("string"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.COLUMN,
-            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
-  }
-}
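(The new column-column template that follows applies one idiom throughout: row indices that pass the comparison are compacted into the batch's selected array in place, and batch.size shrinks to the survivor count. A minimal, self-contained sketch of that idiom, using a long column and > as stand-ins for the template's StringExpr.compare(...) <OperatorSymbol> 0 test; all names here are hypothetical.)

    public class SelectionCompactionSketch {
      public static void main(String[] args) {
        int[] sel = {0, 1, 2, 3, 4};   // batch.selected: candidate row indices
        int n = sel.length;            // batch.size
        long[] col = {5, 7, 2, 9, 4};  // stand-in for one column vector
        long scalar = 5;               // stand-in for the comparison operand
        int newSize = 0;
        for (int j = 0; j != n; j++) {
          int i = sel[j];
          if (col[i] > scalar) {       // the template uses StringExpr.compare(...) <OperatorSymbol> 0
            sel[newSize++] = i;        // keep qualifying row, compacting in place
          }
        }
        n = newSize;                   // batch.size = newSize
        System.out.println("rows kept: " + n);  // prints: rows kept: 2
      }
    }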
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringGroupColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringGroupColumn.txt
new file mode 100644
index 0000000..3bc4e33
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringGroupColumn.txt
@@ -0,0 +1,492 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Filter the rows in a batch by comparing one string column to another.
+ * This code is generated from a template.
+ */
+public class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private int colNum1;
+  private int colNum2;
+
+  public <ClassName>(int colNum1, int colNum2) {
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    BytesColumnVector inputColVector1 = (BytesColumnVector) batch.cols[colNum1];
+    BytesColumnVector inputColVector2 = (BytesColumnVector) batch.cols[colNum2];
+    int[] sel = batch.selected;
+    boolean[] nullPos1 = inputColVector1.isNull;
+    boolean[] nullPos2 = inputColVector2.isNull;
+    int n = batch.size;
+    byte[][] vector1 = inputColVector1.vector;
+    byte[][] vector2 = inputColVector2.vector;
+    int[] start1 = inputColVector1.start;
+    int[] start2 = inputColVector2.start;
+    int[] length1 = inputColVector1.length;
+    int[] length2 = inputColVector2.length;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    // handle case where neither input has nulls
+    if (inputColVector1.noNulls && inputColVector2.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+
+        /* Either all must remain selected or all will be eliminated.
+         * Repeating property will not change.
+         */
+        if (!(StringExpr.compare(vector1[0], start1[0], length1[0],
+            vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (batch.selectedInUse) {
+        int newSize = 0;
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          if (StringExpr.compare(vector1[i], start1[i], length1[i],
+              vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        batch.size = newSize;
+      } else {
+        int newSize = 0;
+        for(int i = 0; i != n; i++) {
+          if (StringExpr.compare(vector1[i], start1[i], length1[i],
+              vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+            sel[newSize++] = i;
+          }
+        }
+        if (newSize < batch.size) {
+          batch.size = newSize;
+          batch.selectedInUse = true;
+        }
+      }
+
+    // handle case where only input 2 has nulls
+    } else if (inputColVector1.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos2[0] ||
+            !(StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+
+        // no need to check for nulls in input 1
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (nullPos2[0]) {
+
+          // no values will qualify because every comparison will be with NULL
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+
+    // handle case where only input 1 has nulls
+    } else if (inputColVector2.noNulls) {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos1[0] ||
+            !(StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+          return;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (nullPos1[0]) {
+
+          // if repeating value is null then every comparison will fail so nothing qualifies
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+              sel[newSize++] = i;
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+
+    // handle case where both inputs have nulls
+    } else {
+      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+        if (nullPos1[0] || nullPos2[0] ||
+            !(StringExpr.compare(vector1[0], start1[0], length1[0],
+                vector2[0], start2[0], length2[0]) <OperatorSymbol> 0)) {
+          batch.size = 0;
+        }
+      } else if (inputColVector1.isRepeating) {
+        if (nullPos1[0]) {
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos2[i]) {
+              if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos2[i]) {
+              if (StringExpr.compare(vector1[0], start1[0], length1[0],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else if (inputColVector2.isRepeating) {
+        if (nullPos2[0]) {
+          batch.size = 0;
+          return;
+        }
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[0], start2[0], length2[0]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      } else { // neither input is repeating
+        if (batch.selectedInUse) {
+          int newSize = 0;
+          for(int j = 0; j != n; j++) {
+            int i = sel[j];
+            if (!nullPos1[i] && !nullPos2[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          batch.size = newSize;
+        } else {
+          int newSize = 0;
+          for(int i = 0; i != n; i++) {
+            if (!nullPos1[i] && !nullPos2[i]) {
+              if (StringExpr.compare(vector1[i], start1[i], length1[i],
+                  vector2[i], start2[i], length2[i]) <OperatorSymbol> 0) {
+                sel[newSize++] = i;
+              }
+            }
+          }
+          if (newSize < batch.size) {
+            batch.size = newSize;
+            batch.selectedInUse = true;
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return -1;
+  }
+
+  public int getColNum1() {
+    return colNum1;
+  }
+
+  public void setColNum1(int colNum1) {
+    this.colNum1 = colNum1;
+  }
+
+  public int getColNum2() {
+    return colNum2;
+  }
+
+  public void setColNum2(int colNum2) {
+    this.colNum2 = colNum2;
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP,
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
\ No newline at end of file
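(The ScalarBase template that follows is the abstract half of a base/subclass split: the base owns the column index, the scalar operand, and the whole comparison loop, while each generated concrete subclass, for example the string-scalar and trunc-string-scalar variants, only adds constructors and its VectorExpressionDescriptor. A minimal pure-JDK sketch of that shape, with hypothetical names, not the Hive classes:)

    abstract class ScalarCompareBaseSketch {
      protected byte[] value;  // scalar operand, mirroring the template's protected field

      // The base owns the row test, as evaluate() does in the template.
      boolean keepRow(byte[] candidate) {
        return java.util.Arrays.equals(candidate, value);  // stand-in for StringExpr.compare(...) <OperatorSymbol> 0
      }
    }

    class EqualScalarSketch extends ScalarCompareBaseSketch {
      EqualScalarSketch(byte[] value) {
        this.value = value;  // subclasses only supply construction (and, in Hive, a descriptor)
      }

      public static void main(String[] args) {
        EqualScalarSketch expr = new EqualScalarSketch("abc".getBytes());
        System.out.println(expr.keepRow("abc".getBytes()));  // true
        System.out.println(expr.keepRow("abd".getBytes()));  // false
      }
    }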
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringGroupScalarBase.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringGroupScalarBase.txt new file mode 100644 index 0000000..1c868ba --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringGroupScalarBase.txt @@ -0,0 +1,155 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * This is a generated class to evaluate a comparison on a vector of strings. + */ +public abstract class <ClassName> extends VectorExpression { + + private static final long serialVersionUID = 1L; + + protected int colNum; + protected byte[] value; + + @Override + public void evaluate(VectorizedRowBatch batch) { + if (childExpressions != null) { + super.evaluateChildren(batch); + } + BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum]; + int[] sel = batch.selected; + boolean[] nullPos = inputColVector.isNull; + int n = batch.size; + byte[][] vector = inputColVector.vector; + int[] length = inputColVector.length; + int[] start = inputColVector.start; + + + // return immediately if batch is empty + if (n == 0) { + return; + } + + if (inputColVector.noNulls) { + if (inputColVector.isRepeating) { + + // All must be selected otherwise size would be zero. Repeating property will not change. + if (!(StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) <OperatorSymbol> 0)) { + + //Entire batch is filtered out. + batch.size = 0; + } + } else if (batch.selectedInUse) { + int newSize = 0; + for(int j=0; j != n; j++) { + int i = sel[j]; + if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) { + sel[newSize++] = i; + } + } + batch.size = newSize; + } else { + int newSize = 0; + for(int i = 0; i != n; i++) { + if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) { + sel[newSize++] = i; + } + } + if (newSize < n) { + batch.size = newSize; + batch.selectedInUse = true; + } + } + } else { + if (inputColVector.isRepeating) { + + // All must be selected otherwise size would be zero. Repeating property will not change. + if (!nullPos[0]) { + if (!(StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) <OperatorSymbol> 0)) { + + //Entire batch is filtered out.
+ batch.size = 0; + } + } else { + batch.size = 0; + } + } else if (batch.selectedInUse) { + int newSize = 0; + for(int j=0; j != n; j++) { + int i = sel[j]; + if (!nullPos[i]) { + if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) { + sel[newSize++] = i; + } + } + } + + //Change the selected vector + batch.size = newSize; + } else { + int newSize = 0; + for(int i = 0; i != n; i++) { + if (!nullPos[i]) { + if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) { + sel[newSize++] = i; + } + } + } + if (newSize < n) { + batch.size = newSize; + batch.selectedInUse = true; + } + } + } + } + + @Override + public int getOutputColumn() { + return -1; + } + + @Override + public String getOutputType() { + return "boolean"; + } + + public int getColNum() { + return colNum; + } + + public void setColNum(int colNum) { + this.colNum = colNum; + } + + public byte[] getValue() { + return value; + } + + public void setValue(byte[] value) { + this.value = value; + } + +} \ No newline at end of file
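These .txt files are templates, not compilable Java: tokens such as <ClassName>, <BaseClassName> and <OperatorSymbol> are substituted at build time by the ant GenVectorCode task, driven by table rows like {"FilterStringGroupColumnCompareStringScalar", "Equal", "=="} added at the top of this patch. A minimal sketch of that expansion step (the class-naming scheme below is assumed for illustration; the real generator derives names and reads templates from disk):

```java
public class TemplateExpansionDemo {

  // One row from the GenVectorCode table at the top of this patch.
  static final String[] DESC =
      {"FilterStringGroupColumnCompareStringScalar", "Equal", "=="};

  static String expand(String template) {
    String operatorName = DESC[1];   // e.g. "Equal"
    String operatorSymbol = DESC[2]; // e.g. "=="
    // Hypothetical naming scheme, for illustration only.
    String className = "FilterStringGroupCol" + operatorName + "StringScalar";
    String baseClassName = "FilterStringGroupCol" + operatorName + "StringGroupScalarBase";
    return template
        .replace("<ClassName>", className)
        .replace("<BaseClassName>", baseClassName)
        .replace("<OperatorSymbol>", operatorSymbol);
  }

  public static void main(String[] args) {
    String template =
        "public class <ClassName> extends <BaseClassName> {\n"
      + "  // filter keeps rows where compare(...) <OperatorSymbol> 0\n"
      + "}\n";
    System.out.print(expand(template));
  }
}
```

One template row per operator therefore yields one concrete class per operator, which keeps the per-row comparison a direct, branch-predictable primitive instead of a virtual call on a comparator.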
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringScalar.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringScalar.txt new file mode 100644 index 0000000..02d7819 --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareStringScalar.txt @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * This is a generated class to evaluate a comparison on a vector of strings. + */ +public class <ClassName> extends <BaseClassName> { + + public <ClassName>(int colNum, byte[] value) { + this.colNum = colNum; + this.value = value; + } + + public <ClassName>() { + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.FILTER) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.STRING) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} \ No newline at end of file
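The leaf template above stays thin on purpose: the operator-specific comparison loop lives in the generated base class, and each leaf only supplies a constructor that normalizes its scalar type to byte[] plus a descriptor for operator binding. The varchar/char leaf that follows differs only in converting the typed scalar once, via value.getValue().getBytes(). A self-contained sketch of that base/leaf split, with invented names (the real classes extend VectorExpression and operate on whole batches):

```java
import java.nio.charset.StandardCharsets;

public class BaseLeafSplitDemo {

  /** Stands in for a generated *ScalarBase class with operator "==" baked in. */
  abstract static class EqualScalarBase {
    protected byte[] value;

    boolean matches(byte[] row) {
      // Byte-for-byte equality, like StringExpr.compare(...) == 0.
      if (row.length != value.length) {
        return false;
      }
      for (int i = 0; i < row.length; i++) {
        if (row[i] != value[i]) {
          return false;
        }
      }
      return true;
    }
  }

  /** Stands in for the string leaf: the scalar is already a byte[]. */
  static class EqualStringScalar extends EqualScalarBase {
    EqualStringScalar(byte[] value) {
      this.value = value;
    }
  }

  /** Stands in for the varchar/char leaf: the typed scalar is converted once. */
  static class EqualTruncStringScalar extends EqualScalarBase {
    EqualTruncStringScalar(String hiveVarcharValue) {
      // Mirrors value.getValue().getBytes() in the template: normalize the
      // typed scalar to bytes up front so the per-row loop stays branch-free.
      this.value = hiveVarcharValue.getBytes(StandardCharsets.UTF_8);
    }
  }

  public static void main(String[] args) {
    byte[] row = "hive".getBytes(StandardCharsets.UTF_8);
    System.out.println(new EqualStringScalar("hive".getBytes(StandardCharsets.UTF_8)).matches(row)); // true
    System.out.println(new EqualTruncStringScalar("hive").matches(row)); // true
  }
}
```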
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareTruncStringScalar.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareTruncStringScalar.txt new file mode 100644 index 0000000..f34107a --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupColumnCompareTruncStringScalar.txt @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>; + +import org.apache.hadoop.hive.common.type.<TruncStringHiveType>; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * This is a generated class to evaluate a comparison on a vector of strings. + */ +public class <ClassName> extends <BaseClassName> { + + public <ClassName>(int colNum, <TruncStringHiveType> value) { + this.colNum = colNum; + this.value = value.getValue().getBytes(); + } + + public <ClassName>() { + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.FILTER) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.getType("<TruncStringTypeName>")) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupScalarCompareStringGroupColumnBase.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupScalarCompareStringGroupColumnBase.txt new file mode 100644 index 0000000..91f5909 --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringGroupScalarCompareStringGroupColumnBase.txt @@ -0,0 +1,158 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * This is a generated class to evaluate a comparison on a vector of strings. + * Do not edit the generated code directly. + */ +public abstract class <ClassName> extends VectorExpression { + + private static final long serialVersionUID = 1L; + + protected int colNum; + protected byte[] value; + + public <ClassName>() { + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + if (childExpressions != null) { + super.evaluateChildren(batch); + } + BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum]; + int[] sel = batch.selected; + boolean[] nullPos = inputColVector.isNull; + int n = batch.size; + byte[][] vector = inputColVector.vector; + int[] length = inputColVector.length; + int[] start = inputColVector.start; + + + // return immediately if batch is empty + if (n == 0) { + return; + } + + if (inputColVector.noNulls) { + if (inputColVector.isRepeating) { + + // All must be selected otherwise size would be zero. Repeating property will not change. + if (!(StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) <OperatorSymbol> 0)) { + + //Entire batch is filtered out.
+ batch.size = 0; + } + } else if (batch.selectedInUse) { + int newSize = 0; + for(int j=0; j != n; j++) { + int i = sel[j]; + if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { + sel[newSize++] = i; + } + } + batch.size = newSize; + } else { + int newSize = 0; + for(int i = 0; i != n; i++) { + if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { + sel[newSize++] = i; + } + } + if (newSize < n) { + batch.size = newSize; + batch.selectedInUse = true; + } + } + } else { + if (inputColVector.isRepeating) { + + // All must be selected otherwise size would be zero. Repeating property will not change. + if (!nullPos[0]) { + if (!(StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) 0)) { + + //Entire batch is filtered out. + batch.size = 0; + } + } else { + batch.size = 0; + } + } else if (batch.selectedInUse) { + int newSize = 0; + for(int j=0; j != n; j++) { + int i = sel[j]; + if (!nullPos[i]) { + if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { + sel[newSize++] = i; + } + } + } + + //Change the selected vector + batch.size = newSize; + } else { + int newSize = 0; + for(int i = 0; i != n; i++) { + if (!nullPos[i]) { + if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { + sel[newSize++] = i; + } + } + } + if (newSize < n) { + batch.size = newSize; + batch.selectedInUse = true; + } + } + } + } + + @Override + public int getOutputColumn() { + return -1; + } + + @Override + public String getOutputType() { + return "boolean"; + } + + public int getColNum() { + return colNum; + } + + public void setColNum(int colNum) { + this.colNum = colNum; + } + + public byte[] getValue() { + return value; + } + + public void setValue(byte[] value) { + this.value = value; + } +} diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareColumn.txt deleted file mode 100644 index f41ec67..0000000 --- ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareColumn.txt +++ /dev/null @@ -1,177 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; - -/** - * This is a generated class to evaluate a comparison on a vector of strings. - * Do not edit the generated code directly. 
- */ -public class extends VectorExpression { - - private static final long serialVersionUID = 1L; - - private int colNum; - private byte[] value; - - public (byte[] value, int colNum) { - this.colNum = colNum; - this.value = value; - } - - public () { - } - - @Override - public void evaluate(VectorizedRowBatch batch) { - if (childExpressions != null) { - super.evaluateChildren(batch); - } - BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum]; - int[] sel = batch.selected; - boolean[] nullPos = inputColVector.isNull; - int n = batch.size; - byte[][] vector = inputColVector.vector; - int[] length = inputColVector.length; - int[] start = inputColVector.start; - - - // return immediately if batch is empty - if (n == 0) { - return; - } - - if (inputColVector.noNulls) { - if (inputColVector.isRepeating) { - - // All must be selected otherwise size would be zero. Repeating property will not change. - if (!(StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) 0)) { - - //Entire batch is filtered out. - batch.size = 0; - } - } else if (batch.selectedInUse) { - int newSize = 0; - for(int j=0; j != n; j++) { - int i = sel[j]; - if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { - sel[newSize++] = i; - } - } - batch.size = newSize; - } else { - int newSize = 0; - for(int i = 0; i != n; i++) { - if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { - sel[newSize++] = i; - } - } - if (newSize < n) { - batch.size = newSize; - batch.selectedInUse = true; - } - } - } else { - if (inputColVector.isRepeating) { - - // All must be selected otherwise size would be zero. Repeating property will not change. - if (!nullPos[0]) { - if (!(StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) 0)) { - - //Entire batch is filtered out. 
- batch.size = 0; - } - } else { - batch.size = 0; - } - } else if (batch.selectedInUse) { - int newSize = 0; - for(int j=0; j != n; j++) { - int i = sel[j]; - if (!nullPos[i]) { - if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { - sel[newSize++] = i; - } - } - } - - //Change the selected vector - batch.size = newSize; - } else { - int newSize = 0; - for(int i = 0; i != n; i++) { - if (!nullPos[i]) { - if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) 0) { - sel[newSize++] = i; - } - } - } - if (newSize < n) { - batch.size = newSize; - batch.selectedInUse = true; - } - } - } - } - - @Override - public int getOutputColumn() { - return -1; - } - - @Override - public String getOutputType() { - return "boolean"; - } - - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public byte[] getValue() { - return value; - } - - public void setValue(byte[] value) { - this.value = value; - } - - @Override - public VectorExpressionDescriptor.Descriptor getDescriptor() { - return (new VectorExpressionDescriptor.Builder()) - .setMode( - VectorExpressionDescriptor.Mode.FILTER) - .setNumArguments(2) - .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) - .setInputExpressionTypes( - VectorExpressionDescriptor.InputExpressionType.SCALAR, - VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); - } -} diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareStringGroupColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareStringGroupColumn.txt new file mode 100644 index 0000000..66472d5 --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/FilterStringScalarCompareStringGroupColumn.txt @@ -0,0 +1,56 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * This is a generated class to evaluate a comparison on a vector of strings. + * Do not edit the generated code directly. 
+ */ +public class <ClassName> extends <BaseClassName> { + + public <ClassName>(byte[] value, int colNum) { + this.colNum = colNum; + this.value = value; + } + + public <ClassName>() { + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.FILTER) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +}
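The FilterTruncStringColumnBetween template that follows keeps a row when left <= value <= right, at two StringExpr.compare calls per row; the "" / "!" entries in the GenVectorCode table negate the test for NOT BETWEEN. A standalone sketch of that predicate over UTF-8 bytes (compare() here imitates StringExpr.compare's unsigned lexicographic order; all names are illustrative, not Hive's):

```java
import java.nio.charset.StandardCharsets;

public class BetweenFilterDemo {

  // Lexicographic compare over unsigned bytes, like StringExpr.compare.
  static int compare(byte[] a, byte[] b) {
    int n = Math.min(a.length, b.length);
    for (int i = 0; i < n; i++) {
      int d = (a[i] & 0xff) - (b[i] & 0xff);
      if (d != 0) {
        return d;
      }
    }
    return a.length - b.length;
  }

  /** notBetween = false models the "" table entry, true models "!". */
  static boolean keep(byte[] v, byte[] left, byte[] right, boolean notBetween) {
    boolean inRange = compare(left, v) <= 0 && compare(v, right) <= 0;
    return notBetween ? !inRange : inRange;
  }

  public static void main(String[] args) {
    byte[] left = "b".getBytes(StandardCharsets.UTF_8);
    byte[] right = "d".getBytes(StandardCharsets.UTF_8);
    byte[] v = "c".getBytes(StandardCharsets.UTF_8);
    System.out.println(keep(v, left, right, false)); // true:  'c' BETWEEN 'b' AND 'd'
    System.out.println(keep(v, left, right, true));  // false: 'c' NOT BETWEEN 'b' AND 'd'
  }
}
```

Note that the template's repeating-value fast path inverts the test (it asks "is the single value out of range?" to zero out the whole batch), which is why its comparisons use < with || while the per-row loops use <= with &&.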
diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTruncStringColumnBetween.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTruncStringColumnBetween.txt new file mode 100644 index 0000000..3670461 --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/FilterTruncStringColumnBetween.txt @@ -0,0 +1,198 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.common.type.<TruncStringHiveType>; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + + +/** + * This is a generated class to evaluate a [NOT] BETWEEN comparison on a vector of strings. + */ +public class <ClassName> extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int colNum; + private byte[] left; + private byte[] right; + + public <ClassName>(int colNum, <TruncStringHiveType> left, <TruncStringHiveType> right) { + this.colNum = colNum; + this.left = left.getValue().getBytes(); + this.right = right.getValue().getBytes(); + } + + public <ClassName>() { + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + if (childExpressions != null) { + super.evaluateChildren(batch); + } + BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum]; + int[] sel = batch.selected; + boolean[] nullPos = inputColVector.isNull; + int n = batch.size; + byte[][] vector = inputColVector.vector; + int[] length = inputColVector.length; + int[] start = inputColVector.start; + + + // return immediately if batch is empty + if (n == 0) { + return; + } + + if (inputColVector.noNulls) { + if (inputColVector.isRepeating) { + + // All must be selected otherwise size would be zero. Repeating property will not change. + if (<OptionalNot>(StringExpr.compare(vector[0], start[0], length[0], left, 0, left.length) < 0 + || StringExpr.compare(right, 0, right.length, vector[0], start[0], length[0]) < 0)) { + + //Entire batch is filtered out. + batch.size = 0; + } + } else if (batch.selectedInUse) { + int newSize = 0; + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (<OptionalNot>(StringExpr.compare(left, 0, left.length, vector[i], start[i], length[i]) <= 0 + && StringExpr.compare(vector[i], start[i], length[i], right, 0, right.length) <= 0)) { + sel[newSize++] = i; + } + } + batch.size = newSize; + } else { + int newSize = 0; + for(int i = 0; i != n; i++) { + if (<OptionalNot>(StringExpr.compare(left, 0, left.length, vector[i], start[i], length[i]) <= 0 + && StringExpr.compare(vector[i], start[i], length[i], right, 0, right.length) <= 0)) { + sel[newSize++] = i; + } + } + if (newSize < n) { + batch.size = newSize; + batch.selectedInUse = true; + } + } + } else { + if (inputColVector.isRepeating) { + + // All must be selected otherwise size would be zero. Repeating property will not change. + if (!nullPos[0]) { + if (<OptionalNot>(StringExpr.compare(vector[0], start[0], length[0], left, 0, left.length) < 0 + || StringExpr.compare(right, 0, right.length, vector[0], start[0], length[0]) < 0)) { + + //Entire batch is filtered out. + batch.size = 0; + } + } else { + batch.size = 0; + } + } else if (batch.selectedInUse) { + int newSize = 0; + for(int j=0; j != n; j++) { + int i = sel[j]; + if (!nullPos[i]) { + if (<OptionalNot>(StringExpr.compare(left, 0, left.length, vector[i], start[i], length[i]) <= 0 + && StringExpr.compare(vector[i], start[i], length[i], right, 0, right.length) <= 0)) { + sel[newSize++] = i; + } + } + } + + //Change the selected vector + batch.size = newSize; + } else { + int newSize = 0; + for(int i = 0; i != n; i++) { + if (!nullPos[i]) { + if (<OptionalNot>(StringExpr.compare(left, 0, left.length, vector[i], start[i], length[i]) <= 0 + && StringExpr.compare(vector[i], start[i], length[i], right, 0, right.length) <= 0)) { + sel[newSize++] = i; + } + } + } + if (newSize < n) { + batch.size = newSize; + batch.selectedInUse = true; + } + } + } + } + + @Override + public int getOutputColumn() { + return -1; + } + + @Override + public String getOutputType() { + return "boolean"; + } + + public int getColNum() { + return colNum; + } + + public void setColNum(int colNum) { + this.colNum = colNum; + } + + public byte[] getLeft() { + return left; + } + + public void setLeft(byte[] value) { + this.left = value; + } + + public byte[] getRight() { + return right; + } + + public void setRight(byte[] value) { + this.right = value; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.FILTER) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.getType("<TruncStringTypeName>"), + VectorExpressionDescriptor.ArgumentType.getType("<TruncStringTypeName>"), + VectorExpressionDescriptor.ArgumentType.getType("<TruncStringTypeName>")) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } + +} diff --git ql/src/gen/vectorization/ExpressionTemplates/FilterTruncStringScalarCompareStringGroupColumn.txt ql/src/gen/vectorization/ExpressionTemplates/FilterTruncStringScalarCompareStringGroupColumn.txt new file mode 100644 index 0000000..2e40e48 --- /dev/null +++ 
ql/src/gen/vectorization/ExpressionTemplates/FilterTruncStringScalarCompareStringGroupColumn.txt @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.; + +import org.apache.hadoop.hive.common.type.; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * This is a generated class to evaluate a comparison on a vector of strings. + * Do not edit the generated code directly. + */ +public class extends { + + public ( value, int colNum) { + this.colNum = colNum; + this.value = value.getValue().getBytes(); + } + + public () { + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.FILTER) + .setNumArguments(2) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.getType(""), + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} \ No newline at end of file diff --git ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareColumn.txt ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareColumn.txt deleted file mode 100644 index 83e8934..0000000 --- ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareColumn.txt +++ /dev/null @@ -1,508 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; - -/** - * Filter the rows in a batch by comparing one string column to another. - * This code is generated from a template. - */ -public class extends VectorExpression { - - private static final long serialVersionUID = 1L; - - private int colNum1; - private int colNum2; - private int outputColumn; - - public (int colNum1, int colNum2, int outputColumn) { - this.colNum1 = colNum1; - this.colNum2 = colNum2; - this.outputColumn = outputColumn; - } - - public () { - } - - @Override - public void evaluate(VectorizedRowBatch batch) { - - if (childExpressions != null) { - super.evaluateChildren(batch); - } - - BytesColumnVector inputColVector1 = (BytesColumnVector) batch.cols[colNum1]; - BytesColumnVector inputColVector2 = (BytesColumnVector) batch.cols[colNum2]; - LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn]; - int[] sel = batch.selected; - boolean[] nullPos1 = inputColVector1.isNull; - boolean[] nullPos2 = inputColVector2.isNull; - boolean[] outNull = outputColVector.isNull; - - int n = batch.size; - byte[][] vector1 = inputColVector1.vector; - byte[][] vector2 = inputColVector2.vector; - int[] start1 = inputColVector1.start; - int[] start2 = inputColVector2.start; - int[] length1 = inputColVector1.length; - int[] length2 = inputColVector2.length; - - long[] outVector = outputColVector.vector; - - // return immediately if batch is empty - if (n == 0) { - return; - } - - outputColVector.noNulls = true; - outputColVector.isRepeating = false; - // handle case where neither input has nulls - if (inputColVector1.noNulls && inputColVector2.noNulls) { - outputColVector.noNulls = true; - if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - outputColVector.isRepeating = true; - int ret = StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[0], start2[0], length2[0]); - if (ret 0) { - outVector[0] = 1; - } else { - outVector[0] = 0; - } - } else if (inputColVector1.isRepeating) { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } else { - for(int i = 0; i != n; i++) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else if (inputColVector2.isRepeating) { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } else { - for(int i = 0; i != n; i++) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], 
start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } else { - for(int i = 0; i != n; i++) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - - // handle case where only input 2 has nulls - } else if (inputColVector1.noNulls) { - outputColVector.noNulls = false; - if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - outputColVector.isRepeating = true; - outNull[0] = nullPos2[0]; - if (!nullPos2[0]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[0], start2[0], length2[0]) 0) { - outVector[0] = 1; - } else { - outVector[0] = 0; - } - } - } else if (inputColVector1.isRepeating) { - - // no need to check for nulls in input 1 - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos2[i]; - if (!nullPos2[i]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos2[i]; - if (!nullPos2[i]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } else if (inputColVector2.isRepeating) { - if (nullPos2[0]) { - // Entire output vector will be null - outputColVector.isRepeating = true; - outNull[0] = true; - return; - } - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = false; - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = false; - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { // neither input is repeating - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos2[i]; - if (!nullPos2[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos2[i]; - if (!nullPos2[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } - - // handle case where only input 1 has nulls - } else if (inputColVector2.noNulls) { - outputColVector.noNulls = false; - if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - outputColVector.isRepeating = true; - outNull[0] = nullPos1[0]; - if (!nullPos1[0]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[0], start2[0], length2[0]) 0) { - outVector[0] = 1; - } else { - outVector[0] = 0; - } - } - } else if (inputColVector1.isRepeating) { - if (nullPos1[0]) { - // Entire output vector will be null - outputColVector.isRepeating = true; - outNull[0] = true; - return; - } - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = false; - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } else { - for(int i = 
0; i != n; i++) { - outNull[i] = false; - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else if (inputColVector2.isRepeating) { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos1[i]; - if (!nullPos1[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos1[i]; - if (!nullPos1[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } else { // neither input is repeating - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos1[i]; - if (!nullPos1[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos1[i]; - if (!nullPos1[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } - - // handle case where both inputs have nulls - } else { - outputColVector.noNulls = false; - if (inputColVector1.isRepeating && inputColVector2.isRepeating) { - outputColVector.isRepeating = true; - outNull[0] = nullPos1[0] || nullPos2[0]; - if (!outNull[0]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[0], start2[0], length2[0]) 0) { - outVector[0] = 1; - } else { - outVector[0] = 0; - } - } - } else if (inputColVector1.isRepeating) { - if (nullPos1[0]) { - outputColVector.isRepeating = true; - outNull[0] = true; - return; - } - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos2[i]; - if (!nullPos2[i]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos2[i]; - if (!nullPos2[i]) { - if (StringExpr.compare(vector1[0], start1[0], length1[0], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } else if (inputColVector2.isRepeating) { - if (nullPos2[0]) { - outputColVector.isRepeating = true; - outNull[0] = true; - return; - } - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos1[i]; - if (!nullPos1[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos1[i]; - if (!nullPos1[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[0], start2[0], length2[0]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } else { // neither input is repeating - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos1[i] || nullPos2[i]; - if (!outNull[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 
0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos1[i] || nullPos2[i]; - if (!outNull[i]) { - if (StringExpr.compare(vector1[i], start1[i], length1[i], - vector2[i], start2[i], length2[i]) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } - } - } - - @Override - public String getOutputType() { - return "boolean"; - } - - @Override - public int getOutputColumn() { - return outputColumn; - } - - public int getColNum1() { - return colNum1; - } - - public void setColNum1(int colNum1) { - this.colNum1 = colNum1; - } - - public int getColNum2() { - return colNum2; - } - - public void setColNum2(int colNum2) { - this.colNum2 = colNum2; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - @Override - public VectorExpressionDescriptor.Descriptor getDescriptor() { - return (new VectorExpressionDescriptor.Builder()) - .setMode( - VectorExpressionDescriptor.Mode.PROJECTION) - .setNumArguments(2) - .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) - .setInputExpressionTypes( - VectorExpressionDescriptor.InputExpressionType.COLUMN, - VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); - } -} diff --git ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareScalar.txt ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareScalar.txt deleted file mode 100644 index a198cb6..0000000 --- ql/src/gen/vectorization/ExpressionTemplates/StringColumnCompareScalar.txt +++ /dev/null @@ -1,180 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; - -/** - * This is a generated class to evaluate a comparison on a vector of strings. 
- */ -public class extends VectorExpression { - - private static final long serialVersionUID = 1L; - - private int colNum; - private byte[] value; - private int outputColumn; - - public (int colNum, byte[] value, int outputColumn) { - this.colNum = colNum; - this.value = value; - this.outputColumn = outputColumn; - } - - public () { - } - - @Override - public void evaluate(VectorizedRowBatch batch) { - if (childExpressions != null) { - super.evaluateChildren(batch); - } - BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum]; - LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn]; - int[] sel = batch.selected; - boolean[] nullPos = inputColVector.isNull; - boolean[] outNull = outputColVector.isNull; - int n = batch.size; - byte[][] vector = inputColVector.vector; - int[] length = inputColVector.length; - int[] start = inputColVector.start; - long[] outVector = outputColVector.vector; - - - // return immediately if batch is empty - if (n == 0) { - return; - } - - outputColVector.isRepeating = false; - if (inputColVector.noNulls) { - outputColVector.noNulls = true; - if (inputColVector.isRepeating) { - outputColVector.isRepeating = true; - if (StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) 0) { - outVector[0] = 1; - } else { - outVector[0] = 0; - } - } else if (batch.selectedInUse) { - for(int j=0; j != n; j++) { - int i = sel[j]; - if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } else { - for(int i = 0; i != n; i++) { - if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - outputColVector.noNulls = false; - if (inputColVector.isRepeating) { - outputColVector.isRepeating = true; - outNull[0] = nullPos[0]; - if (!nullPos[0]) { - if (StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) 0) { - outVector[0] = 1; - } else { - outVector[0] = 0; - } - } - } else if (batch.selectedInUse) { - for(int j=0; j != n; j++) { - int i = sel[j]; - outNull[i] = nullPos[i]; - if (!nullPos[i]) { - if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } else { - for(int i = 0; i != n; i++) { - outNull[i] = nullPos[i]; - if (!nullPos[i]) { - if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) 0) { - outVector[i] = 1; - } else { - outVector[i] = 0; - } - } - } - } - } - } - - @Override - public int getOutputColumn() { - return outputColumn; - } - - @Override - public String getOutputType() { - return "boolean"; - } - - public int getColNum() { - return colNum; - } - - public void setColNum(int colNum) { - this.colNum = colNum; - } - - public byte[] getValue() { - return value; - } - - public void setValue(byte[] value) { - this.value = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - @Override - public VectorExpressionDescriptor.Descriptor getDescriptor() { - return (new VectorExpressionDescriptor.Builder()) - .setMode( - VectorExpressionDescriptor.Mode.PROJECTION) - .setNumArguments(2) - .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) - .setInputExpressionTypes( - VectorExpressionDescriptor.InputExpressionType.COLUMN, - 
VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); - } -} diff --git ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringGroupColumn.txt ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringGroupColumn.txt new file mode 100644 index 0000000..749edc7 --- /dev/null +++ ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringGroupColumn.txt @@ -0,0 +1,508 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions.gen; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Filter the rows in a batch by comparing one string column to another. + * This code is generated from a template. 
+ */ +public class extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int colNum1; + private int colNum2; + private int outputColumn; + + public (int colNum1, int colNum2, int outputColumn) { + this.colNum1 = colNum1; + this.colNum2 = colNum2; + this.outputColumn = outputColumn; + } + + public () { + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + BytesColumnVector inputColVector1 = (BytesColumnVector) batch.cols[colNum1]; + BytesColumnVector inputColVector2 = (BytesColumnVector) batch.cols[colNum2]; + LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] nullPos1 = inputColVector1.isNull; + boolean[] nullPos2 = inputColVector2.isNull; + boolean[] outNull = outputColVector.isNull; + + int n = batch.size; + byte[][] vector1 = inputColVector1.vector; + byte[][] vector2 = inputColVector2.vector; + int[] start1 = inputColVector1.start; + int[] start2 = inputColVector2.start; + int[] length1 = inputColVector1.length; + int[] length2 = inputColVector2.length; + + long[] outVector = outputColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + outputColVector.noNulls = true; + outputColVector.isRepeating = false; + // handle case where neither input has nulls + if (inputColVector1.noNulls && inputColVector2.noNulls) { + outputColVector.noNulls = true; + if (inputColVector1.isRepeating && inputColVector2.isRepeating) { + outputColVector.isRepeating = true; + int ret = StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[0], start2[0], length2[0]); + if (ret 0) { + outVector[0] = 1; + } else { + outVector[0] = 0; + } + } else if (inputColVector1.isRepeating) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } else { + for(int i = 0; i != n; i++) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else if (inputColVector2.isRepeating) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } else { + for(int i = 0; i != n; i++) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } else { + for(int i = 0; i != n; i++) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + + // handle case where only input 2 has nulls + } else if (inputColVector1.noNulls) { + outputColVector.noNulls = false; + if (inputColVector1.isRepeating && inputColVector2.isRepeating) { + outputColVector.isRepeating = true; + outNull[0] = nullPos2[0]; + if (!nullPos2[0]) { + if (StringExpr.compare(vector1[0], 
start1[0], length1[0], + vector2[0], start2[0], length2[0]) 0) { + outVector[0] = 1; + } else { + outVector[0] = 0; + } + } + } else if (inputColVector1.isRepeating) { + + // no need to check for nulls in input 1 + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos2[i]; + if (!nullPos2[i]) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = nullPos2[i]; + if (!nullPos2[i]) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } else if (inputColVector2.isRepeating) { + if (nullPos2[0]) { + // Entire output vector will be null + outputColVector.isRepeating = true; + outNull[0] = true; + return; + } + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = false; + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = false; + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { // neither input is repeating + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos2[i]; + if (!nullPos2[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = nullPos2[i]; + if (!nullPos2[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } + + // handle case where only input 1 has nulls + } else if (inputColVector2.noNulls) { + outputColVector.noNulls = false; + if (inputColVector1.isRepeating && inputColVector2.isRepeating) { + outputColVector.isRepeating = true; + outNull[0] = nullPos1[0]; + if (!nullPos1[0]) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[0], start2[0], length2[0]) 0) { + outVector[0] = 1; + } else { + outVector[0] = 0; + } + } + } else if (inputColVector1.isRepeating) { + if (nullPos1[0]) { + // Entire output vector will be null + outputColVector.isRepeating = true; + outNull[0] = true; + return; + } + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = false; + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = false; + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else if (inputColVector2.isRepeating) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos1[i]; + if (!nullPos1[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = 
nullPos1[i]; + if (!nullPos1[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } else { // neither input is repeating + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos1[i]; + if (!nullPos1[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = nullPos1[i]; + if (!nullPos1[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } + + // handle case where both inputs have nulls + } else { + outputColVector.noNulls = false; + if (inputColVector1.isRepeating && inputColVector2.isRepeating) { + outputColVector.isRepeating = true; + outNull[0] = nullPos1[0] || nullPos2[0]; + if (!outNull[0]) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[0], start2[0], length2[0]) 0) { + outVector[0] = 1; + } else { + outVector[0] = 0; + } + } + } else if (inputColVector1.isRepeating) { + if (nullPos1[0]) { + outputColVector.isRepeating = true; + outNull[0] = true; + return; + } + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos2[i]; + if (!nullPos2[i]) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = nullPos2[i]; + if (!nullPos2[i]) { + if (StringExpr.compare(vector1[0], start1[0], length1[0], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } else if (inputColVector2.isRepeating) { + if (nullPos2[0]) { + outputColVector.isRepeating = true; + outNull[0] = true; + return; + } + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos1[i]; + if (!nullPos1[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = nullPos1[i]; + if (!nullPos1[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[0], start2[0], length2[0]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } else { // neither input is repeating + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + outNull[i] = nullPos1[i] || nullPos2[i]; + if (!outNull[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } else { + for(int i = 0; i != n; i++) { + outNull[i] = nullPos1[i] || nullPos2[i]; + if (!outNull[i]) { + if (StringExpr.compare(vector1[i], start1[i], length1[i], + vector2[i], start2[i], length2[i]) 0) { + outVector[i] = 1; + } else { + outVector[i] = 0; + } + } + } + } + } + } + } + + @Override + public String getOutputType() { + return "boolean"; + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + public int getColNum1() { + return colNum1; + } + + public void setColNum1(int colNum1) { + this.colNum1 = colNum1; + } + + public int getColNum2() { 
diff --git ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringGroupScalarBase.txt ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringGroupScalarBase.txt
new file mode 100644
index 0000000..4d65d50
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringGroupScalarBase.txt
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This is a generated class to evaluate a comparison on a vector of strings.
+ */
+public abstract class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  protected int colNum;
+  protected byte[] value;
+  protected int outputColumn;
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+    BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    boolean[] outNull = outputColVector.isNull;
+    int n = batch.size;
+    byte[][] vector = inputColVector.vector;
+    int[] length = inputColVector.length;
+    int[] start = inputColVector.start;
+    long[] outVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    if (inputColVector.noNulls) {
+      outputColVector.noNulls = true;
+      if (inputColVector.isRepeating) {
+        outputColVector.isRepeating = true;
+        if (StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) <OperatorSymbol> 0) {
+          outVector[0] = 1;
+        } else {
+          outVector[0] = 0;
+        }
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
+            outVector[i] = 1;
+          } else {
+            outVector[i] = 0;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
+            outVector[i] = 1;
+          } else {
+            outVector[i] = 0;
+          }
+        }
+      }
+    } else {
+      outputColVector.noNulls = false;
+      if (inputColVector.isRepeating) {
+        outputColVector.isRepeating = true;
+        outNull[0] = nullPos[0];
+        if (!nullPos[0]) {
+          if (StringExpr.compare(vector[0], start[0], length[0], value, 0, value.length) <OperatorSymbol> 0) {
+            outVector[0] = 1;
+          } else {
+            outVector[0] = 0;
+          }
+        }
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outNull[i] = nullPos[i];
+          if (!nullPos[i]) {
+            if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
+              outVector[i] = 1;
+            } else {
+              outVector[i] = 0;
+            }
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outNull[i] = nullPos[i];
+          if (!nullPos[i]) {
+            if (StringExpr.compare(vector[i], start[i], length[i], value, 0, value.length) <OperatorSymbol> 0) {
+              outVector[i] = 1;
+            } else {
+              outVector[i] = 0;
+            }
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public byte[] getValue() {
+    return value;
+  }
+
+  public void setValue(byte[] value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+
+}
\ No newline at end of file
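The <ClassName> and <OperatorSymbol> tokens in the template above are substituted by GenVectorCode when it walks entries such as {"StringGroupColumnCompareStringGroupScalarBase", "Equal", "=="}. A hedged sketch of that expansion; the placeholder names mirror the template, but the exact generator logic is an assumption:

import java.util.regex.Matcher;

public class TemplateExpansionSketch {
  // Expand one tdesc entry, e.g. {"...Base", "Equal", "=="}, into Java source text.
  static String expand(String template, String operatorName, String operatorSymbol) {
    String className = "StringGroupCol" + operatorName + "StringGroupScalarBase";
    return template
        .replaceAll("<ClassName>", className)
        .replaceAll("<OperatorSymbol>", Matcher.quoteReplacement(operatorSymbol));
  }

  public static void main(String[] args) {
    String line = "if (StringExpr.compare(...) <OperatorSymbol> 0) { // in <ClassName>";
    System.out.println(expand(line, "Equal", "=="));
  }
}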
diff --git ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringScalar.txt ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringScalar.txt
new file mode 100644
index 0000000..1df7157
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareStringScalar.txt
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This is a generated class to evaluate a comparison on a vector of strings.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(int colNum, byte[] value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP,
+            VectorExpressionDescriptor.ArgumentType.STRING)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
\ No newline at end of file
diff --git ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareTruncStringScalar.txt ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareTruncStringScalar.txt
new file mode 100644
index 0000000..716f57a
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/StringGroupColumnCompareTruncStringScalar.txt
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>;
+
+import org.apache.hadoop.hive.common.type.<HiveType>;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This is a generated class to evaluate a comparison on a vector of strings.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  public <ClassName>(int colNum, <HiveType> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value.getValue().getBytes();
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP,
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"))
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+  }
+}
\ No newline at end of file
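Note the constructor above calls value.getValue().getBytes() once: the CHAR or VARCHAR scalar is flattened to bytes at plan time, so the per-row loop inherited from the base class is a pure byte comparison. An illustrative stand-in for that design choice, not the Hive classes:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class ScalarBytesSketch {
  private final byte[] value;                     // encoded once, reused for every row

  public ScalarBytesSketch(String scalar) {
    this.value = scalar.getBytes(StandardCharsets.UTF_8);
  }

  public boolean equalsRow(byte[] row) {
    return Arrays.equals(value, row);             // per-row cost: one byte compare
  }
}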
diff --git ql/src/gen/vectorization/ExpressionTemplates/StringGroupScalarCompareStringGroupColumnBase.txt ql/src/gen/vectorization/ExpressionTemplates/StringGroupScalarCompareStringGroupColumnBase.txt
new file mode 100644
index 0000000..a734281
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/StringGroupScalarCompareStringGroupColumnBase.txt
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This is a generated class to evaluate a comparison on a vector of strings.
+ * Do not edit the generated code directly.
+ */
+public abstract class <ClassName> extends VectorExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  protected int colNum;
+  protected byte[] value;
+  protected int outputColumn;
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+    BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum];
+    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] nullPos = inputColVector.isNull;
+    boolean[] outNull = outputColVector.isNull;
+    int n = batch.size;
+    byte[][] vector = inputColVector.vector;
+    int[] length = inputColVector.length;
+    int[] start = inputColVector.start;
+    long[] outVector = outputColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating = false;
+    if (inputColVector.noNulls) {
+      outputColVector.noNulls = true;
+      if (inputColVector.isRepeating) {
+        outputColVector.isRepeating = true;
+        if (StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) <OperatorSymbol> 0) {
+          outVector[0] = 1;
+        } else {
+          outVector[0] = 0;
+        }
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
+            outVector[i] = 1;
+          } else {
+            outVector[i] = 0;
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
+            outVector[i] = 1;
+          } else {
+            outVector[i] = 0;
+          }
+        }
+      }
+    } else {
+      outputColVector.noNulls = false;
+      if (inputColVector.isRepeating) {
+        outputColVector.isRepeating = true;
+        outNull[0] = nullPos[0];
+        if (!nullPos[0]) {
+          if (StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) <OperatorSymbol> 0) {
+            outVector[0] = 1;
+          } else {
+            outVector[0] = 0;
+          }
+        }
+      } else if (batch.selectedInUse) {
+        for(int j=0; j != n; j++) {
+          int i = sel[j];
+          outNull[i] = nullPos[i];
+          if (!nullPos[i]) {
+            if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
+              outVector[i] = 1;
+            } else {
+              outVector[i] = 0;
+            }
+          }
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outNull[i] = nullPos[i];
+          if (!nullPos[i]) {
+            if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
+              outVector[i] = 1;
+            } else {
+              outVector[i] = 0;
+            }
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "boolean";
+  }
+
+  public int getColNum() {
+    return colNum;
+  }
+
+  public void setColNum(int colNum) {
+    this.colNum = colNum;
+  }
+
+  public byte[] getValue() {
+    return value;
+  }
+
+  public void setValue(byte[] value) {
+    this.value = value;
+  }
+
+  public void setOutputColumn(int outputColumn) {
+    this.outputColumn = outputColumn;
+  }
+}
\ No newline at end of file
diff --git ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareColumn.txt ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareColumn.txt
deleted file mode 100644
index c797018..0000000
--- ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareColumn.txt
+++ /dev/null
@@ -1,180 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
-import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-
-/**
- * This is a generated class to evaluate a comparison on a vector of strings.
- * Do not edit the generated code directly.
- */
-public class <ClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private byte[] value;
-  private int outputColumn;
-
-  public <ClassName>(byte[] value, int colNum, int outputColumn) {
-    this.colNum = colNum;
-    this.value = value;
-    this.outputColumn = outputColumn;
-  }
-
-  public <ClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-    BytesColumnVector inputColVector = (BytesColumnVector) batch.cols[colNum];
-    LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
-    int[] sel = batch.selected;
-    boolean[] nullPos = inputColVector.isNull;
-    boolean[] outNull = outputColVector.isNull;
-    int n = batch.size;
-    byte[][] vector = inputColVector.vector;
-    int[] length = inputColVector.length;
-    int[] start = inputColVector.start;
-    long[] outVector = outputColVector.vector;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    outputColVector.isRepeating = false;
-    if (inputColVector.noNulls) {
-      outputColVector.noNulls = true;
-      if (inputColVector.isRepeating) {
-        outputColVector.isRepeating = true;
-        if (StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) <OperatorSymbol> 0) {
-          outVector[0] = 1;
-        } else {
-          outVector[0] = 0;
-        }
-      } else if (batch.selectedInUse) {
-        for(int j=0; j != n; j++) {
-          int i = sel[j];
-          if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
-            outVector[i] = 1;
-          } else {
-            outVector[i] = 0;
-          }
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
-            outVector[i] = 1;
-          } else {
-            outVector[i] = 0;
-          }
-        }
-      }
-    } else {
-      outputColVector.noNulls = false;
-      if (inputColVector.isRepeating) {
-        outputColVector.isRepeating = true;
-        outNull[0] = nullPos[0];
-        if (!nullPos[0]) {
-          if (StringExpr.compare(value, 0, value.length, vector[0], start[0], length[0]) <OperatorSymbol> 0) {
-            outVector[0] = 1;
-          } else {
-            outVector[0] = 0;
-          }
-        }
-      } else if (batch.selectedInUse) {
-        for(int j=0; j != n; j++) {
-          int i = sel[j];
-          outNull[i] = nullPos[i];
-          if (!nullPos[i]) {
-            if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
-              outVector[i] = 1;
-            } else {
-              outVector[i] = 0;
-            }
-          }
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outNull[i] = nullPos[i];
-          if (!nullPos[i]) {
-            if (StringExpr.compare(value, 0, value.length, vector[i], start[i], length[i]) <OperatorSymbol> 0) {
-              outVector[i] = 1;
-            } else {
-              outVector[i] = 0;
-            }
-          }
-        }
-      }
-    }
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "boolean";
-  }
-
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public byte[] getValue() {
-    return value;
-  }
-
-  public void setValue(byte[] value) {
-    this.value = value;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  @Override
-  public VectorExpressionDescriptor.Descriptor getDescriptor() {
-    return (new VectorExpressionDescriptor.Builder())
-        .setMode(
-            VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(2)
-        .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("string"),
-            VectorExpressionDescriptor.ArgumentType.getType("string"))
-        .setInputExpressionTypes(
-            VectorExpressionDescriptor.InputExpressionType.SCALAR,
-            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-  }
-}
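The deleted class above and its scalar-on-the-left replacements keep the scalar as the first argument to StringExpr.compare, so the generated <OperatorSymbol> reads naturally as scalar-versus-column. A sketch of the comparison semantics, assuming StringExpr.compare orders unsigned bytes the way UTF-8 text sorts:

public class ByteCompareSketch {
  // Lexicographic compare over unsigned bytes; negative/zero/positive like compareTo.
  static int compare(byte[] a, byte[] b) {
    int n = Math.min(a.length, b.length);
    for (int k = 0; k < n; k++) {
      int c = (a[k] & 0xff) - (b[k] & 0xff);     // unsigned order matches UTF-8 sorting
      if (c != 0) {
        return c;
      }
    }
    return a.length - b.length;                  // a shorter prefix sorts first
  }
}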
diff --git ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareStringGroupColumn.txt ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareStringGroupColumn.txt
new file mode 100644
index 0000000..fcf55e1
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/StringScalarCompareStringGroupColumn.txt
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This is a generated class to evaluate a comparison on a vector of strings.
+ * Do not edit the generated code directly.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  private static final long serialVersionUID = 1L;
+
+  public <ClassName>(byte[] value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.STRING,
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
diff --git ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt
new file mode 100644
index 0000000..4c0ba04
--- /dev/null
+++ ql/src/gen/vectorization/ExpressionTemplates/TruncStringScalarCompareStringGroupColumn.txt
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.<BaseClassName>;
+
+import org.apache.hadoop.hive.common.type.<HiveType>;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * This is a generated class to evaluate a comparison on a vector of strings.
+ * Do not edit the generated code directly.
+ */
+public class <ClassName> extends <BaseClassName> {
+
+  private static final long serialVersionUID = 1L;
+
+  public <ClassName>(<HiveType> value, int colNum, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value.getValue().getBytes();
+    this.outputColumn = outputColumn;
+  }
+
+  public <ClassName>() {
+  }
+
+  @Override
+  public VectorExpressionDescriptor.Descriptor getDescriptor() {
+    return (new VectorExpressionDescriptor.Builder())
+        .setMode(
+            VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(2)
+        .setArgumentTypes(
+            VectorExpressionDescriptor.ArgumentType.getType("<VectorExprArgType>"),
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP)
+        .setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.SCALAR,
+            VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
index 2d67b5b..fb43aee 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
@@ -35,8 +35,11 @@
     LONG(1),
     DOUBLE(2),
     STRING(3),
-    DECIMAL(4),
-    ANY(7);
+    CHAR(4),
+    VARCHAR(5),
+    STRING_GROUP(6),
+    DECIMAL(7),
+    ANY(8);
 
     private final int value;
 
@@ -162,6 +165,27 @@ public Descriptor build() {
    */
   public static final class Descriptor {
 
+    private boolean isStringGroup(ArgumentType type) {
+      if (type == ArgumentType.STRING ||
+          type == ArgumentType.CHAR ||
+          type == ArgumentType.VARCHAR) {
+        return true;
+      }
+      return false;
+    }
+
+    private boolean isSameGroup(ArgumentType type1, ArgumentType type2) {
+      if (type1.equals(ArgumentType.ANY) ||
+          type2.equals(ArgumentType.ANY)) {
+        return true;
+      }
+      if (type1 == ArgumentType.STRING_GROUP && isStringGroup(type2)) {
+        return true;
+      }
+      if (type2 == ArgumentType.STRING_GROUP && isStringGroup(type1)) {
+        return true;
+      }
+      return false;
+    }
+
     @Override
     public boolean equals(Object o) {
       Descriptor other = (Descriptor) o;
@@ -169,8 +193,8 @@ public boolean equals(Object o) {
         return false;
       }
       for (int i = 0; i < argCount; i++) {
-        if (!argTypes[i].equals(other.argTypes[i]) && (!argTypes[i].equals(ArgumentType.ANY) &&
-            !other.argTypes[i].equals(ArgumentType.ANY))) {
+        if (!argTypes[i].equals(other.argTypes[i]) &&
+            !isSameGroup(argTypes[i], other.argTypes[i])) {
           return false;
         }
         if (!exprTypes[i].equals(other.exprTypes[i])) {
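The isSameGroup change above is what lets one generated expression serve string, char, and varchar columns: a concrete string type on one side matches the STRING_GROUP wildcard on the other. Restated as a standalone sketch with a local enum:

public class StringGroupMatchSketch {
  enum ArgType { STRING, CHAR, VARCHAR, STRING_GROUP, ANY }

  static boolean isStringGroup(ArgType t) {
    return t == ArgType.STRING || t == ArgType.CHAR || t == ArgType.VARCHAR;
  }

  // Mirrors Descriptor.isSameGroup: ANY matches everything; STRING_GROUP
  // matches any concrete member of the string family.
  static boolean sameGroup(ArgType a, ArgType b) {
    if (a == ArgType.ANY || b == ArgType.ANY) {
      return true;
    }
    return (a == ArgType.STRING_GROUP && isStringGroup(b))
        || (b == ArgType.STRING_GROUP && isStringGroup(a));
  }
}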
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index a78c396..5f34588 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -97,11 +97,13 @@
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Context class for vectorization execution.
@@ -123,6 +125,15 @@
   public static final Pattern decimalTypePattern = Pattern.compile("decimal.*",
       Pattern.CASE_INSENSITIVE);
 
+  public static final Pattern charTypePattern = Pattern.compile("char.*",
+      Pattern.CASE_INSENSITIVE);
+
+  public static final Pattern varcharTypePattern = Pattern.compile("varchar.*",
+      Pattern.CASE_INSENSITIVE);
+
+  public static final Pattern charVarcharTypePattern = Pattern.compile("char.*|varchar.*",
+      Pattern.CASE_INSENSITIVE);
+
   //Map column number to type
   private final OutputColumnManager ocm;
@@ -548,6 +559,12 @@ private GenericUDF getGenericUDFForCast(TypeInfo castType) throws HiveException
       case STRING:
         udfClass = new UDFToString();
         break;
+      case CHAR:
+        genericUdf = new GenericUDFToChar();
+        break;
+      case VARCHAR:
+        genericUdf = new GenericUDFToVarchar();
+        break;
       case BOOLEAN:
         udfClass = new UDFToBoolean();
         break;
@@ -572,7 +589,7 @@
       ((GenericUDFBridge) genericUdf).setUdfClassName(udfClass.getClass().getName());
     }
     if (genericUdf instanceof SettableUDF) {
-      ((SettableUDF)genericUdf).setTypeInfo(castType);
+      ((SettableUDF)genericUdf).setTypeInfo(castType);
     }
     return genericUdf;
   }
@@ -592,15 +609,15 @@ public static boolean isNonVectorizedPathUDF(ExprNodeGenericFuncDesc expr) {
       Class<? extends UDF> udfClass = bridge.getUdfClass();
       if (udfClass.equals(UDFHex.class)
           || udfClass.equals(UDFConv.class)
-          || isCastToIntFamily(udfClass) && arg0Type(expr).equals("string")
-          || isCastToFloatFamily(udfClass) && arg0Type(expr).equals("string")
+          || isCastToIntFamily(udfClass) && isStringFamily(arg0Type(expr))
+          || isCastToFloatFamily(udfClass) && isStringFamily(arg0Type(expr))
          || udfClass.equals(UDFToString.class) &&
                (arg0Type(expr).equals("timestamp")
                    || arg0Type(expr).equals("double")
                    || arg0Type(expr).equals("float"))) {
        return true;
      }
-    } else if ((gudf instanceof GenericUDFTimestamp && arg0Type(expr).equals("string"))
+    } else if ((gudf instanceof GenericUDFTimestamp && isStringFamily(arg0Type(expr)))
 
             /* GenericUDFCase and GenericUDFWhen are implemented with the UDF Adaptor because
              * of their complexity and generality. In the future, variations of these
@@ -615,6 +632,16 @@ public static boolean isNonVectorizedPathUDF(ExprNodeGenericFuncDesc expr) {
         || gudf instanceof GenericUDFCase
         || gudf instanceof GenericUDFWhen) {
       return true;
+    } else if (gudf instanceof GenericUDFToChar &&
+        (arg0Type(expr).equals("timestamp")
+            || arg0Type(expr).equals("double")
+            || arg0Type(expr).equals("float"))) {
+      return true;
+    } else if (gudf instanceof GenericUDFToVarchar &&
+        (arg0Type(expr).equals("timestamp")
+            || arg0Type(expr).equals("double")
+            || arg0Type(expr).equals("float"))) {
+      return true;
     }
     return false;
   }
@@ -723,12 +750,15 @@ private VectorExpression getConstantVectorExpression(Object constantValue, TypeInfo typeInfo,
       Mode mode) throws HiveException {
     String type = typeInfo.getTypeName();
     String colVectorType = getNormalizedTypeName(type);
+    if (colVectorType == null) {
+      throw new HiveException("No vector type for type name " + type);
+    }
     int outCol = -1;
     if (mode == Mode.PROJECTION) {
       outCol = ocm.allocateOutputColumn(colVectorType);
     }
     if (constantValue == null) {
-      return new ConstantVectorExpression(outCol, type, true);
+      return new ConstantVectorExpression(outCol, type, true);
     } else if (decimalTypePattern.matcher(type).matches()) {
       VectorExpression ve = new ConstantVectorExpression(outCol, (Decimal128) constantValue);
       ve.setOutputType(typeInfo.getTypeName());
@@ -739,7 +769,7 @@
           ((Number) constantValue).longValue());
     } else if (type.equalsIgnoreCase("double") || type.equalsIgnoreCase("float")) {
       return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue());
-    } else if (type.equalsIgnoreCase("string")) {
+    } else if (isStringFamily(type)) {
       return new ConstantVectorExpression(outCol, ((String) constantValue).getBytes());
     } else if (type.equalsIgnoreCase("boolean")) {
       if (mode == Mode.FILTER) {
@@ -799,7 +829,15 @@ private VectorExpression getVectorExpressionForUdf(Class udf, List vectorClass,
     try {
       for (int i = 0; i < numChildren; i++) {
         ExprNodeDesc child = childExpr.get(i);
-        inputTypes[i] = VectorExpression.Type.getValue(child.getTypeInfo().getTypeName());
+        inputTypes[i] = VectorExpression.Type.getValue(getUndecoratedTypeName(child.getTypeInfo().getTypeName()));
         if (child instanceof ExprNodeGenericFuncDesc) {
           VectorExpression vChild = getVectorExpression(child, childrenMode);
           children.add(vChild);
@@ -870,36 +908,68 @@ private Mode getChildrenMode(Mode mode, Class udf) {
     return Mode.PROJECTION;
   }
 
+  private String getNewInstanceArgumentString(Object [] args) {
+    ArrayList<String> argClasses = new ArrayList<String>();
+    for (Object obj : args) {
+      argClasses.add(obj.getClass().getSimpleName());
+    }
+    return "arguments: " + Arrays.toString(args) + ", argument classes: " + argClasses.toString();
+  }
+
   private VectorExpression instantiateExpression(Class<?> vclass, TypeInfo returnType, Object...args)
       throws HiveException {
     VectorExpression ve = null;
     Constructor<?> ctor = getConstructor(vclass);
     int numParams = ctor.getParameterTypes().length;
     int argsLength = (args == null) ? 0 : args.length;
-    try {
-      if (numParams == 0) {
+    if (numParams == 0) {
+      try {
         ve = (VectorExpression) ctor.newInstance();
-      } else if (numParams == argsLength) {
+      } catch (Exception ex) {
+        throw new HiveException("Could not instantiate " + vclass.getSimpleName() + " with 0 arguments, exception: " +
+            StringUtils.stringifyException(ex));
+      }
+    } else if (numParams == argsLength) {
+      try {
         ve = (VectorExpression) ctor.newInstance(args);
-      } else if (numParams == argsLength + 1) {
-        // Additional argument is needed, which is the outputcolumn.
+      } catch (Exception ex) {
+        throw new HiveException("Could not instantiate " + vclass.getSimpleName() + " with " + getNewInstanceArgumentString(args) + ", exception: " +
+            StringUtils.stringifyException(ex));
+      }
+    } else if (numParams == argsLength + 1) {
+      // Additional argument is needed, which is the outputcolumn.
+      Object [] newArgs = null;
+      try {
        String outType;
 
        // Special handling for decimal because decimal types need scale and precision parameter.
        // This special handling should be avoided by using returnType uniformly for all cases.
        if (returnType != null) {
          outType = getNormalizedTypeName(returnType.getTypeName()).toLowerCase();
+          if (outType == null) {
+            throw new HiveException("No vector type for type name " + returnType);
+          }
        } else {
          outType = ((VectorExpression) vclass.newInstance()).getOutputType();
        }
        int outputCol = ocm.allocateOutputColumn(outType);
-        Object [] newArgs = Arrays.copyOf(args, numParams);
+        newArgs = Arrays.copyOf(args, numParams);
        newArgs[numParams-1] = outputCol;
+
        ve = (VectorExpression) ctor.newInstance(newArgs);
        ve.setOutputType(outType);
+      } catch (Exception ex) {
+        throw new HiveException("Could not instantiate " + vclass.getSimpleName() + " with arguments " + getNewInstanceArgumentString(newArgs) + ", exception: " +
+            StringUtils.stringifyException(ex));
+      }
+    }
+    // Add maxLength parameter to UDFs that have CHAR or VARCHAR output.
+    if (ve instanceof TruncStringOutput) {
+      TruncStringOutput truncStringOutput = (TruncStringOutput) ve;
+      if (returnType instanceof BaseCharTypeInfo) {
+        BaseCharTypeInfo baseCharTypeInfo = (BaseCharTypeInfo) returnType;
+        truncStringOutput.setMaxLength(baseCharTypeInfo.getLength());
       }
-    } catch (Exception ex) {
-      throw new HiveException("Could not instantiate " + vclass.getSimpleName(), ex);
     }
     return ve;
   }
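The numParams == argsLength + 1 branch above encodes a convention: a vector expression constructor that takes one more parameter than the caller supplied expects the allocated output column in its last slot. A simplified, reflection-only sketch of that convention, with error handling and output-type lookup omitted:

import java.lang.reflect.Constructor;
import java.util.Arrays;

public class OutputColumnCtorSketch {
  static Object instantiate(Class<?> cls, int outputColumn, Object... args) throws Exception {
    Constructor<?> ctor = cls.getConstructors()[0];   // assumes one public constructor
    int numParams = ctor.getParameterTypes().length;
    if (numParams == args.length + 1) {
      Object[] newArgs = Arrays.copyOf(args, numParams);
      newArgs[numParams - 1] = outputColumn;          // append the output column
      return ctor.newInstance(newArgs);
    }
    return ctor.newInstance(args);                    // exact match: pass through
  }
}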
@@ -933,8 +1003,12 @@ private VectorExpression getGenericUdfVectorExpression(GenericUDF udf,
       }
     } else if (udf instanceof GenericUDFToDecimal) {
       return getCastToDecimal(childExpr, returnType);
-    }
-
+    } else if (udf instanceof GenericUDFToChar) {
+      return getCastToChar(childExpr, returnType);
+    } else if (udf instanceof GenericUDFToVarchar) {
+      return getCastToVarChar(childExpr, returnType);
+    }
+
     // Now do a general lookup
     Class udfClass = udf.getClass();
     if (udf instanceof GenericUDFBridge) {
@@ -1265,6 +1339,64 @@ private VectorExpression getCastToString(List<ExprNodeDesc> childExpr, TypeInfo
     throw new HiveException("Unhandled cast input type: " + inputType);
   }
 
+  private VectorExpression getCastToChar(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+      throws HiveException {
+    ExprNodeDesc child = childExpr.get(0);
+    String inputType = childExpr.get(0).getTypeString();
+    if (child instanceof ExprNodeConstantDesc) {
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
+    }
+    if (inputType.equals("boolean")) {
+      // Boolean must come before the integer family. It's a special case.
+      return createVectorExpression(CastBooleanToCharViaLongToChar.class, childExpr, Mode.PROJECTION, null);
+    } else if (isIntFamily(inputType)) {
+      return createVectorExpression(CastLongToChar.class, childExpr, Mode.PROJECTION, null);
+    } else if (isDecimalFamily(inputType)) {
+      return createVectorExpression(CastDecimalToChar.class, childExpr, Mode.PROJECTION, returnType);
+    } else if (isDateFamily(inputType)) {
+      return createVectorExpression(CastDateToChar.class, childExpr, Mode.PROJECTION, returnType);
+    } else if (isStringFamily(inputType)) {
+      return createVectorExpression(CastStringGroupToChar.class, childExpr, Mode.PROJECTION, returnType);
+    }
+
+    /*
+     * Timestamp, float, and double types are handled by the legacy code path. See isNonVectorizedPathUDF.
+     */
+
+    throw new HiveException("Unhandled cast input type: " + inputType);
+  }
+
+  private VectorExpression getCastToVarChar(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+      throws HiveException {
+    ExprNodeDesc child = childExpr.get(0);
+    String inputType = childExpr.get(0).getTypeString();
+    if (child instanceof ExprNodeConstantDesc) {
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
+    }
+    if (inputType.equals("boolean")) {
+      // Boolean must come before the integer family. It's a special case.
+      return createVectorExpression(CastBooleanToVarCharViaLongToVarChar.class, childExpr, Mode.PROJECTION, null);
+    } else if (isIntFamily(inputType)) {
+      return createVectorExpression(CastLongToVarChar.class, childExpr, Mode.PROJECTION, null);
+    } else if (isDecimalFamily(inputType)) {
+      return createVectorExpression(CastDecimalToVarChar.class, childExpr, Mode.PROJECTION, returnType);
+    } else if (isDateFamily(inputType)) {
+      return createVectorExpression(CastDateToVarChar.class, childExpr, Mode.PROJECTION, returnType);
+    } else if (isStringFamily(inputType)) {
+      return createVectorExpression(CastStringGroupToVarChar.class, childExpr, Mode.PROJECTION, returnType);
+    }
+
+    /*
+     * Timestamp, float, and double types are handled by the legacy code path. See isNonVectorizedPathUDF.
+     */
+
+    throw new HiveException("Unhandled cast input type: " + inputType);
+  }
+
   private VectorExpression getCastToDoubleExpression(Class<?> udf, List<ExprNodeDesc> childExpr,
       TypeInfo returnType) throws HiveException {
     ExprNodeDesc child = childExpr.get(0);
@@ -1304,7 +1436,7 @@ private VectorExpression getCastToBoolean(List<ExprNodeDesc> childExpr)
       return getConstantVectorExpression(null, TypeInfoFactory.booleanTypeInfo, Mode.PROJECTION);
     }
     // Long and double are handled using descriptors, string needs to be specially handled.
-    if (inputType.equals("string")) {
+    if (isStringFamily(inputType)) {
       // string casts to false if it is 0 characters long, otherwise true
       VectorExpression lenExpr = createVectorExpression(StringLength.class, childExpr,
           Mode.PROJECTION, null);
@@ -1411,6 +1543,14 @@ private VectorExpression getBetweenFilterExpression(List<ExprNodeDesc> childExpr
       cl = FilterStringColumnBetween.class;
     } else if (colType.equals("string") && notKeywordPresent) {
       cl = FilterStringColumnNotBetween.class;
+    } else if (varcharTypePattern.matcher(colType).matches() && !notKeywordPresent) {
+      cl = FilterVarCharColumnBetween.class;
+    } else if (varcharTypePattern.matcher(colType).matches() && notKeywordPresent) {
+      cl = FilterVarCharColumnNotBetween.class;
+    } else if (charTypePattern.matcher(colType).matches() && !notKeywordPresent) {
+      cl = FilterCharColumnBetween.class;
+    } else if (charTypePattern.matcher(colType).matches() && notKeywordPresent) {
+      cl = FilterCharColumnNotBetween.class;
     } else if (colType.equals("timestamp")) {
 
       // Get timestamp boundary values as longs instead of the expected strings
@@ -1509,7 +1649,7 @@ private VectorExpression getCustomUDFExpression(ExprNodeGenericFuncDesc expr)
   }
 
   public static boolean isStringFamily(String resultType) {
-    return resultType.equalsIgnoreCase("string");
+    return resultType.equalsIgnoreCase("string") || charVarcharTypePattern.matcher(resultType).matches();
   }
 
   public static boolean isDatetimeFamily(String resultType) {
@@ -1617,7 +1757,7 @@ private long getTimestampScalar(ExprNodeDesc expr) throws HiveException {
           "Non-constant argument not supported for vectorization.");
     }
     ExprNodeConstantDesc constExpr = (ExprNodeConstantDesc) expr;
-    if (constExpr.getTypeString().equals("string")) {
+    if (isStringFamily(constExpr.getTypeString())) {
 
       // create expression tree with type cast from string to timestamp
       ExprNodeGenericFuncDesc expr2 = new ExprNodeGenericFuncDesc();
@@ -1667,34 +1807,88 @@ private long evaluateCastToTimestamp(ExprNodeDesc expr) throws HiveException {
     }
   }
 
-  static String getNormalizedTypeName(String colType){
+  static String getNormalizedTypeName(String colType) {
     String normalizedType = null;
-    if (colType.equalsIgnoreCase("Double") || colType.equalsIgnoreCase("Float")) {
+    String lower = colType.toLowerCase();
+    if (lower.equals("double") || lower.equals("float")) {
       normalizedType = "Double";
-    } else if (colType.equalsIgnoreCase("String")) {
+    } else if (lower.equals("string")) {
       normalizedType = "String";
-    } else if (decimalTypePattern.matcher(colType).matches()) {
+    } else if (charTypePattern.matcher(lower).matches()) {
+      //Return the CHAR type as is, it includes maximum length.
+      normalizedType = colType;
+    } else if (varcharTypePattern.matcher(lower).matches()) {
+      //Return the VARCHAR type as is, it includes maximum length.
+      normalizedType = colType;
+    } else if (decimalTypePattern.matcher(lower).matches()) {
       //Return the decimal type as is, it includes scale and precision.
       normalizedType = colType;
-    } else {
+    } else if (lower.equals("tinyint") ||
+        lower.equals("smallint") ||
+        lower.equals("int") ||
+        lower.equals("bigint") ||
+        lower.equals("boolean") ||
+        lower.equals("long") ||
+        lower.equals("timestamp") ||
+        lower.equals("date")) {
       normalizedType = "Long";
+    } else {
+      normalizedType = null;
     }
     return normalizedType;
   }
+
+  static String getUndecoratedTypeName(String colType) throws HiveException {
+    String undecoratedType = null;
+    String lower = colType.toLowerCase();
+    if (lower.equals("double") || lower.equals("float")) {
+      undecoratedType = "Double";
+    } else if (lower.equals("string")) {
+      undecoratedType = "String";
+    } else if (charTypePattern.matcher(lower).matches()) {
+      //Return the CHAR type without maximum length.
+      undecoratedType = "Char";
+    } else if (varcharTypePattern.matcher(lower).matches()) {
+      //Return the VARCHAR type without maximum length.
+      undecoratedType = "VarChar";
+    } else if (decimalTypePattern.matcher(lower).matches()) {
+      //Return the decimal type without scale and precision.
+      undecoratedType = "Decimal";
+    } else if (lower.equals("tinyint") ||
+        lower.equals("smallint") ||
+        lower.equals("int") ||
+        lower.equals("bigint") ||
+        lower.equals("boolean") ||
+        lower.equals("long") ||
+        lower.equals("timestamp") ||
+        lower.equals("date")) {
+      undecoratedType = "Long";
+    } else {
+      undecoratedType = null;
+    }
+    return undecoratedType;
+  }
 
   static Object[][] aggregatesDefinition = {
     {"min",       "Long",    VectorUDAFMinLong.class},
     {"min",       "Double",  VectorUDAFMinDouble.class},
     {"min",       "String",  VectorUDAFMinString.class},
+    {"min",       "Char",    VectorUDAFMinString.class},
+    {"min",       "VarChar", VectorUDAFMinString.class},
     {"min",       "Decimal", VectorUDAFMinDecimal.class},
     {"max",       "Long",    VectorUDAFMaxLong.class},
     {"max",       "Double",  VectorUDAFMaxDouble.class},
     {"max",       "String",  VectorUDAFMaxString.class},
+    {"max",       "Char",    VectorUDAFMaxString.class},
+    {"max",       "VarChar", VectorUDAFMaxString.class},
     {"max",       "Decimal", VectorUDAFMaxDecimal.class},
     {"count",     null,      VectorUDAFCountStar.class},
     {"count",     "Long",    VectorUDAFCount.class},
     {"count",     "Double",  VectorUDAFCount.class},
     {"count",     "String",  VectorUDAFCount.class},
+    {"count",     "Char",    VectorUDAFCount.class},
+    {"count",     "VarChar", VectorUDAFCount.class},
     {"count",     "Decimal", VectorUDAFCount.class},
     {"sum",       "Long",    VectorUDAFSumLong.class},
     {"sum",       "Double",  VectorUDAFSumDouble.class},
@@ -1741,10 +1935,7 @@ public VectorAggregateExpression getAggregatorExpression(AggregationDesc desc)
 
     if (paramDescList.size() > 0) {
       ExprNodeDesc inputExpr = paramDescList.get(0);
-      inputType = getNormalizedTypeName(inputExpr.getTypeString());
-      if (decimalTypePattern.matcher(inputType).matches()) {
-        inputType = "Decimal";
-      }
+      inputType = getUndecoratedTypeName(inputExpr.getTypeString());
     }
 
     for (Object[] aggDef : aggregatesDefinition) {
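The two helpers above split one job in two: getNormalizedTypeName keeps type decorations (char(10) stays char(10), so the maximum length survives into the vectorized plan), while getUndecoratedTypeName drops them for the aggregate lookup table. An illustrative condensation of the second mapping:

public class UndecoratedTypeSketch {
  static String undecorated(String colType) {
    String lower = colType.toLowerCase();
    if (lower.startsWith("varchar")) return "VarChar";    // varchar(50) -> VarChar
    if (lower.startsWith("char")) return "Char";          // char(10) -> Char
    if (lower.startsWith("decimal")) return "Decimal";    // decimal(10,2) -> Decimal
    if (lower.equals("double") || lower.equals("float")) return "Double";
    if (lower.equals("string")) return "String";
    return "Long";   // integer, boolean, timestamp, date families
  }
  // undecorated("varchar(50)") -> "VarChar", which the aggregate table maps to the
  // existing string implementations (VectorUDAFMinString and friends).
}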
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index 16454e7..4fc11e0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -23,11 +23,16 @@
 import java.util.LinkedList;
 import java.util.List;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -126,6 +131,8 @@ private static void allocateColumnVector(StructObjectInspector oi,
         break;
       case BINARY:
       case STRING:
+      case CHAR:
+      case VARCHAR:
         cvList.add(new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE));
         break;
       case DECIMAL:
@@ -375,6 +382,53 @@ public static void addRowToBatchFrom(Object row, StructObjectInspector oi,
           }
         }
         break;
+      case CHAR: {
+        BytesColumnVector bcv = (BytesColumnVector) batch.cols[off + i];
+        if (writableCol != null) {
+          bcv.isNull[rowIndex] = false;
+          HiveChar colHiveChar = ((HiveCharWritable) writableCol).getHiveChar();
+          byte[] bytes = colHiveChar.toString().getBytes();
+
+          // UNDONE: How do we get the maxLength here?
+          // UNDONE: We need to use StringExpr.rightTrimAndTruncate here...
+          int trimmedByteLength = StringExpr.rightTrim(bytes, 0, bytes.length);
+
+          int start = buffer.getLength();
+          try {
+            // In vector mode, we store CHAR as unpadded.
+            buffer.write(bytes, 0, trimmedByteLength);
+          } catch (IOException ioe) {
+            throw new IllegalStateException("bad write", ioe);
+          }
+          bcv.setRef(rowIndex, buffer.getData(), start, trimmedByteLength);
+        } else {
+          setNullColIsNullValue(bcv, rowIndex);
+        }
+      }
+        break;
+      case VARCHAR: {
+        BytesColumnVector bcv = (BytesColumnVector) batch.cols[off + i];
+        if (writableCol != null) {
+          bcv.isNull[rowIndex] = false;
+          HiveVarchar colHiveVarchar = ((HiveVarcharWritable) writableCol).getHiveVarchar();
+          byte[] bytes = colHiveVarchar.toString().getBytes();
+
+          // UNDONE: How do we get the maxLength here?
+          // UNDONE: We need to use StringExpr.truncate here...
+          int length = bytes.length;
+
+          int start = buffer.getLength();
+          try {
+            buffer.write(bytes, 0, length);
+          } catch (IOException ioe) {
+            throw new IllegalStateException("bad write", ioe);
+          }
+          bcv.setRef(rowIndex, buffer.getData(), start, length);
+        } else {
+          setNullColIsNullValue(bcv, rowIndex);
+        }
+      }
+        break;
       case DECIMAL:
         DecimalColumnVector dcv = (DecimalColumnVector) batch.cols[off + i];
         if (writableCol != null) {
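The CHAR branch above stores values unpadded in the BytesColumnVector, relying on a right trim of pad blanks. A sketch of what StringExpr.rightTrim is assumed to compute here (the return value is the trimmed byte length handed to setRef):

public class RightTrimSketch {
  static int rightTrim(byte[] bytes, int start, int length) {
    int end = start + length;
    while (end > start && bytes[end - 1] == ' ') {
      end--;                                   // strip trailing ASCII blanks
    }
    return end - start;                        // length of the unpadded value
  }
}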
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java
index 9669c91..5ce7553 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java
@@ -158,7 +158,10 @@ public Writable serializeVector(VectorizedRowBatch vrg, ObjectInspector objInspe
               serializeVectorStream.write(bytes, 0, bytes.length);
             }
             break;
-          case STRING: {
+          case STRING:
+          case CHAR:
+          case VARCHAR: {
+            // Is it correct to escape CHAR and VARCHAR?
             BytesColumnVector bcv = (BytesColumnVector) batch.cols[k];
             LazyUtils.writeEscaped(serializeVectorStream, bcv.vector[rowIndex],
                 bcv.start[rowIndex],
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index 2536817..193f503 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -278,7 +278,7 @@ public VectorizedRowBatch createVectorizedRowBatch() throws HiveException
       case PRIMITIVE: {
         PrimitiveObjectInspector poi = (PrimitiveObjectInspector) foi;
         // Vectorization currently only supports the following data types:
-        //   BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, BINARY, STRING, TIMESTAMP,
+        //   BOOLEAN, BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, BINARY, STRING, CHAR, VARCHAR, TIMESTAMP,
         //   DATE and DECIMAL
         switch (poi.getPrimitiveCategory()) {
           case BOOLEAN:
@@ -296,6 +296,8 @@ public VectorizedRowBatch createVectorizedRowBatch() throws HiveException
             break;
           case BINARY:
           case STRING:
+          case CHAR:
+          case VARCHAR:
             result.cols[j] = new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
             break;
           case DECIMAL:
@@ -544,7 +546,9 @@ public void addPartitionColsToBatch(VectorizedRowBatch batch) throws HiveExcepti
         }
         break;
 
-      case STRING: {
+      case STRING:
+      case CHAR:
+      case VARCHAR: {
         BytesColumnVector bcv = (BytesColumnVector) batch.cols[colIndex];
         String sVal = (String) value;
         if (sVal == null) {
@@ -566,13 +570,17 @@ public void addPartitionColsToBatch(VectorizedRowBatch batch) throws HiveExcepti
     }
   }
 
-  private void addScratchColumnsToBatch(VectorizedRowBatch vrb) {
+  private void addScratchColumnsToBatch(VectorizedRowBatch vrb) throws HiveException {
     if (columnTypeMap != null && !columnTypeMap.isEmpty()) {
       int origNumCols = vrb.numCols;
       int newNumCols = vrb.cols.length+columnTypeMap.keySet().size();
       vrb.cols = Arrays.copyOf(vrb.cols, newNumCols);
       for (int i = origNumCols; i < newNumCols; i++) {
-        vrb.cols[i] = allocateColumnVector(columnTypeMap.get(i),
+        String typeName = columnTypeMap.get(i);
+        if (typeName == null) {
+          throw new HiveException("No type found for column type entry " + i);
+        }
+        vrb.cols[i] = allocateColumnVector(typeName,
             VectorizedRowBatch.DEFAULT_SIZE);
       }
       vrb.numCols = vrb.cols.length;
@@ -599,13 +607,15 @@ private void addScratchColumnsToBatch(VectorizedRowBatch vrb) {
   private ColumnVector allocateColumnVector(String type, int defaultSize) {
     if (type.equalsIgnoreCase("double")) {
       return new DoubleColumnVector(defaultSize);
-    } else if (type.equalsIgnoreCase("string")) {
+    } else if (VectorizationContext.isStringFamily(type)) {
       return new BytesColumnVector(defaultSize);
     } else if (VectorizationContext.decimalTypePattern.matcher(type).matches()){
       int [] precisionScale = getScalePrecisionFromDecimalType(type);
       return new DecimalColumnVector(defaultSize, precisionScale[0], precisionScale[1]);
-    } else {
+    } else if (type.equalsIgnoreCase("long")) {
       return new LongColumnVector(defaultSize);
+    } else {
+      return null;
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
index 37ef8fa..eca239b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/AbstractFilterStringColLikeStringScalar.java
@@ -420,8 +420,8 @@ public void setPattern(String pattern) {
             VectorExpressionDescriptor.Mode.FILTER)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("string"),
-            VectorExpressionDescriptor.ArgumentType.getType("string"))
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP,
+            VectorExpressionDescriptor.ArgumentType.STRING_GROUP)
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToCharViaLongToChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToCharViaLongToChar.java
new file mode 100644
index 0000000..e88ff5b
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToCharViaLongToChar.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions;
+
+import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+
+public class CastBooleanToCharViaLongToChar extends CastBooleanToStringViaLongToString implements TruncStringOutput {
+
+  private static final long serialVersionUID = 1L;
+  private int maxLength; // Must be manually set with setMaxLength.
+
+  public CastBooleanToCharViaLongToChar(int inputColumn, int outputColumn) {
+    super(inputColumn, outputColumn);
+  }
+
+  @Override
+  protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+    StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength);
+  }
+
+  @Override
+  public String getOutputType() {
+    return "Char";
+  }
+
+  @Override
+  public int getMaxLength() {
+    return maxLength;
+  }
+
+  @Override
+  public void setMaxLength(int maxLength) {
+    this.maxLength = maxLength;
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToStringViaLongToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToStringViaLongToString.java
index d16bbb1..45288ca 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToStringViaLongToString.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastBooleanToStringViaLongToString.java
@@ -22,17 +22,14 @@
 public class CastBooleanToStringViaLongToString extends LongToStringUnaryUDF {
   private static final long serialVersionUID = 1L;
-  private transient byte[] temp; // space to put date string
   private static final byte[][] dictionary = {
     {'F', 'A', 'L', 'S', 'E'},
     {'T', 'R', 'U', 'E'}
   };
 
-  public CastBooleanToStringViaLongToString() {
-    super();
-    temp = new byte[8];
-  }
-
   public CastBooleanToStringViaLongToString(int inputColumn, int outputColumn) {
     super(inputColumn, outputColumn);
-    temp = new byte[8];
+  }
+
+  protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) {
+    outV.setVal(i, bytes, 0, length);
   }
 
@@ -41,6 +38,6 @@ protected void func(BytesColumnVector outV, long[] vector, int i) {
     /* 0 is false and 1 is true in the input vector, so a simple dictionary is used
      * with two entries. 0 references FALSE and 1 references TRUE in the dictionary.
      */
-    outV.setVal(i, dictionary[(int) vector[i]], 0, dictionary[(int) vector[i]].length);
+    assign(outV, i, dictionary[(int) vector[i]], dictionary[(int) vector[i]].length);
   }
-}
+}
\ No newline at end of file
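The refactoring above turns the STRING cast into a template method: the base class routes its output through an overridable assign(), and the CHAR/VARCHAR subclasses swap in trimming and truncation without touching the conversion logic. A standalone illustration of that shape, not the Hive classes:

public class AssignOverrideSketch {
  static class StringCast {                        // STRING: emit characters unchanged
    void assign(StringBuilder out, String s) { out.append(s); }
  }
  static class VarCharCast extends StringCast {    // VARCHAR: truncate to maxLength
    final int maxLength;
    VarCharCast(int maxLength) { this.maxLength = maxLength; }
    @Override void assign(StringBuilder out, String s) {
      out.append(s, 0, Math.min(s.length(), maxLength));
    }
  }
  static class CharCast extends VarCharCast {      // CHAR: right-trim, then truncate
    CharCast(int maxLength) { super(maxLength); }
    @Override void assign(StringBuilder out, String s) {
      int end = s.length();
      while (end > 0 && s.charAt(end - 1) == ' ') {
        end--;
      }
      super.assign(out, s.substring(0, end));
    }
  }
}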
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastBooleanToVarCharViaLongToVarChar extends CastBooleanToStringViaLongToString implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastBooleanToVarCharViaLongToVarChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.truncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "VarChar"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToChar.java new file mode 100644 index 0000000..187f12b --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToChar.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastDateToChar extends CastDateToString implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastDateToChar() { + super(); + } + + public CastDateToChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "Char"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java index 39334b8..00a974f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java @@ -35,10 +35,15 @@ public CastDateToString(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } + // The assign method will be overridden for CHAR and VARCHAR.
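+  // (Template-method hook: func() renders the date bytes once, and the
+  // subclass-chosen assign() decides how they land in the output vector.
+  // CastDateToChar right-trims and truncates to maxLength; CastDateToVarChar
+  // truncates only.)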
+ protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + outV.setVal(i, bytes, 0, length); + } + @Override protected void func(BytesColumnVector outV, long[] vector, int i) { dt.setTime(DateWritable.daysToMillis((int) vector[i])); byte[] temp = dt.toString().getBytes(); - outV.setVal(i, temp, 0, temp.length); + assign(outV, i, temp, temp.length); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToVarChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToVarChar.java new file mode 100644 index 0000000..5ad745c --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToVarChar.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastDateToVarChar extends CastDateToString implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastDateToVarChar() { + super(); + } + + public CastDateToVarChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.truncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "VarChar"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToChar.java new file mode 100644 index 0000000..aab3e70 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToChar.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +/** + * To support vectorized cast of decimal to char. + */ +public class CastDecimalToChar extends CastDecimalToString implements TruncStringOutput { + + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastDecimalToChar() { + super(); + } + + public CastDecimalToChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "Char"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java index a436fa8..fa0143f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java index c0a99b7..6d01498 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java @@ -36,6 +36,11 @@ public CastDecimalToString(int inputColumn, int outputColumn) { super(inputColumn, outputColumn); } + // The assign method will be overridden for CHAR and VARCHAR. + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + outV.setVal(i, bytes, 0, length); + } + @Override protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) { String s = inV.vector[i].getHiveDecimalString(); @@ -47,6 +52,6 @@ protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) { // This should never happen. If it does, there is a bug. throw new RuntimeException("Internal error: unable to convert decimal to string"); } - outV.setVal(i, b, 0, b.length); + assign(outV, i, b, b.length); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToVarChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToVarChar.java new file mode 100644 index 0000000..267b0b1 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToVarChar.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership.
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +/** + * To support vectorized cast of decimal to varchar. + */ +public class CastDecimalToVarChar extends CastDecimalToString implements TruncStringOutput { + + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastDecimalToVarChar() { + super(); + } + + public CastDecimalToVarChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.truncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "VarChar"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToChar.java new file mode 100644 index 0000000..27674c4 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToChar.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastLongToChar extends CastLongToString implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength.
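+  // CHAR comparison ignores trailing blanks, so the vectorized form is stored
+  // right-trimmed in addition to being truncated to the declared maximum
+  // length (hence rightTrimAndTruncate in assign below).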
+ + public CastLongToChar() { + super(); + } + + public CastLongToChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.rightTrimAndTruncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "Char"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToString.java index 43bdfc2..cdfc387 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToString.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToString.java @@ -34,9 +34,14 @@ public CastLongToString(int inputColumn, int outputColumn) { temp = new byte[20]; } + // The assign method will be overridden for CHAR and VARCHAR. + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + outV.setVal(i, bytes, 0, length); + } + @Override protected void func(BytesColumnVector outV, long[] vector, int i) { int len = MathExpr.writeLongToUTF8(temp, vector[i]); - outV.setVal(i, temp, 0, len); + assign(outV, i, temp, len); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToVarChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToVarChar.java new file mode 100644 index 0000000..7c3dca2 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToVarChar.java @@ -0,0 +1,54 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastLongToVarChar extends CastLongToString implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. 
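+  // VARCHAR enforces only the declared maximum length, so assign below
+  // truncates without right-trimming.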
+ + public CastLongToVarChar() { + super(); + } + + public CastLongToVarChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + @Override + protected void assign(BytesColumnVector outV, int i, byte[] bytes, int length) { + StringExpr.truncate(outV, i, bytes, 0, length, maxLength); + } + + @Override + public String getOutputType() { + return "VarChar"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringGroupToChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringGroupToChar.java new file mode 100644 index 0000000..7c06ff5 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringGroupToChar.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastStringGroupToChar extends StringUnaryUDFDirect implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastStringGroupToChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + public CastStringGroupToChar() { + super(); + } + + /** + * Do right trim and truncate for CHAR. + */ + protected void func(BytesColumnVector outV, byte[][] vector, int[] start, int[] length, int i) { + StringExpr.rightTrimAndTruncate(outV, i, vector[i], start[i], length[i], maxLength); + } + @Override + public String getOutputType() { + return "Char"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringGroupToVarChar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringGroupToVarChar.java new file mode 100644 index 0000000..376ce92 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringGroupToVarChar.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + +public class CastStringGroupToVarChar extends StringUnaryUDFDirect implements TruncStringOutput { + private static final long serialVersionUID = 1L; + private int maxLength; // Must be manually set with setMaxLength. + + public CastStringGroupToVarChar(int inputColumn, int outputColumn) { + super(inputColumn, outputColumn); + } + + public CastStringGroupToVarChar() { + super(); + } + + /** + * Do truncate for VARCHAR. + */ + protected void func(BytesColumnVector outV, byte[][] vector, int[] start, int[] length, int i) { + StringExpr.truncate(outV, i, vector[i], start[i], length[i], maxLength); + } + @Override + public String getOutputType() { + return "VarChar"; + } + + @Override + public int getMaxLength() { + return maxLength; + } + + @Override + public void setMaxLength(int maxLength) { + this.maxLength = maxLength; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java index 1d07615..e3e860c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDate.java @@ -154,7 +154,7 @@ public String getOutputType() { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java index 7317141..b4ac2b1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java @@ -159,7 +159,7 @@ public void setInputColumn(int inputColumn) { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalToStringUnaryUDF.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalToStringUnaryUDF.java index 0e41a7c..4b1182c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalToStringUnaryUDF.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalToStringUnaryUDF.java @@ -130,7 +130,7 @@ public void setInputColumn(int inputColumn) { @Override public String getOutputType() { - return "Decimal"; + return "String"; } @Override diff --git
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprCharScalarStringGroupColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprCharScalarStringGroupColumn.java new file mode 100644 index 0000000..bc5fb5a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprCharScalarStringGroupColumn.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveChar; + +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input column expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string scalar. + * The third is a string column or non-constant expression result. + */ +public class IfExprCharScalarStringGroupColumn extends IfExprStringScalarStringGroupColumn { + + private static final long serialVersionUID = 1L; + + public IfExprCharScalarStringGroupColumn(int arg1Column, HiveChar arg2Scalar, int arg3Column, int outputColumn) { + super(arg1Column, arg2Scalar.getValue().getBytes(), arg3Column, outputColumn); + } + + public IfExprCharScalarStringGroupColumn() { + super(); + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.CHAR, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprCharScalarStringScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprCharScalarStringScalar.java new file mode 100644 index 0000000..0fccf15 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprCharScalarStringScalar.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveChar; + +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input column expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string scalar. + * The third is a string scalar. + */ +public class IfExprCharScalarStringScalar extends IfExprStringScalarStringScalar { + + private static final long serialVersionUID = 1L; + + public IfExprCharScalarStringScalar( + int arg1Column, HiveChar arg2Scalar, byte[] arg3Scalar, int outputColumn) { + super(arg1Column, arg2Scalar.getValue().getBytes(), arg3Scalar, outputColumn); + } + + public IfExprCharScalarStringScalar() { + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.CHAR, + VectorExpressionDescriptor.ArgumentType.STRING) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringColumnStringColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringColumnStringColumn.java deleted file mode 100644 index 7cab0e5..0000000 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringColumnStringColumn.java +++ /dev/null @@ -1,221 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.exec.vector.expressions; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; - -/** - * Compute IF(expr1, expr2, expr3) for 3 input column expressions. - * The first is always a boolean (LongColumnVector). - * The second and third are string columns or string expression results. - */ -public class IfExprStringColumnStringColumn extends VectorExpression { - - private static final long serialVersionUID = 1L; - - private int arg1Column, arg2Column, arg3Column; - private int outputColumn; - - public IfExprStringColumnStringColumn(int arg1Column, int arg2Column, int arg3Column, int outputColumn) { - this.arg1Column = arg1Column; - this.arg2Column = arg2Column; - this.arg3Column = arg3Column; - this.outputColumn = outputColumn; - } - - public IfExprStringColumnStringColumn() { - } - - @Override - public void evaluate(VectorizedRowBatch batch) { - - if (childExpressions != null) { - super.evaluateChildren(batch); - } - - LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column]; - BytesColumnVector arg2ColVector = (BytesColumnVector) batch.cols[arg2Column]; - BytesColumnVector arg3ColVector = (BytesColumnVector) batch.cols[arg3Column]; - BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumn]; - int[] sel = batch.selected; - boolean[] outputIsNull = outputColVector.isNull; - outputColVector.noNulls = arg2ColVector.noNulls && arg3ColVector.noNulls; - outputColVector.isRepeating = false; // may override later - int n = batch.size; - long[] vector1 = arg1ColVector.vector; - - // return immediately if batch is empty - if (n == 0) { - return; - } - - outputColVector.initBuffer(); - - /* All the code paths below propagate nulls even if neither arg2 nor arg3 - * have nulls. This is to reduce the number of code paths and shorten the - * code, at the expense of maybe doing unnecessary work if neither input - * has nulls. This could be improved in the future by expanding the number - * of code paths. - */ - if (arg1ColVector.isRepeating) { - if (vector1[0] == 1) { - arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); - } else { - arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); - } - return; - } - - // extend any repeating values and noNulls indicator in the inputs - arg2ColVector.flatten(batch.selectedInUse, sel, n); - arg3ColVector.flatten(batch.selectedInUse, sel, n); - - if (arg1ColVector.noNulls) { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (vector1[i] == 1 ? 
- arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); - } - } else { - for(int i = 0; i != n; i++) { - if (vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (vector1[i] == 1 ? - arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); - } - } - } else /* there are nulls */ { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (!arg1ColVector.isNull[i] && vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? - arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); - } - } else { - for(int i = 0; i != n; i++) { - if (!arg1ColVector.isNull[i] && vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? - arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); - } - } - } - arg2ColVector.unFlatten(); - arg3ColVector.unFlatten(); - } - - @Override - public int getOutputColumn() { - return outputColumn; - } - - @Override - public String getOutputType() { - return "String"; - } - - public int getArg1Column() { - return arg1Column; - } - - public void setArg1Column(int colNum) { - this.arg1Column = colNum; - } - - public int getArg2Column() { - return arg2Column; - } - - public void setArg2Column(int colNum) { - this.arg2Column = colNum; - } - - public int getArg3Column() { - return arg3Column; - } - - public void setArg3Column(int colNum) { - this.arg3Column = colNum; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - @Override - public VectorExpressionDescriptor.Descriptor getDescriptor() { - return (new VectorExpressionDescriptor.Builder()) - .setMode( - VectorExpressionDescriptor.Mode.PROJECTION) - .setNumArguments(3) - .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("long"), - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) - .setInputExpressionTypes( - VectorExpressionDescriptor.InputExpressionType.COLUMN, - VectorExpressionDescriptor.InputExpressionType.COLUMN, - VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); - } -} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringColumnStringScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringColumnStringScalar.java deleted file mode 100644 index 159d2a2..0000000 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringColumnStringScalar.java +++ /dev/null @@ -1,208 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.exec.vector.expressions; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; - -/** - * Compute IF(expr1, expr2, expr3) for 3 input expressions. - * The first is always a boolean (LongColumnVector). - * The second is a string column expression. - * The third is a string scalar. - */ -public class IfExprStringColumnStringScalar extends VectorExpression { - - private static final long serialVersionUID = 1L; - - private int arg1Column, arg2Column; - private byte[] arg3Scalar; - private int outputColumn; - - public IfExprStringColumnStringScalar(int arg1Column, int arg2Column, byte[] arg3Scalar, int outputColumn) { - this.arg1Column = arg1Column; - this.arg2Column = arg2Column; - this.arg3Scalar = arg3Scalar; - this.outputColumn = outputColumn; - } - - public IfExprStringColumnStringScalar() { - } - - @Override - public void evaluate(VectorizedRowBatch batch) { - - if (childExpressions != null) { - super.evaluateChildren(batch); - } - - LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column]; - BytesColumnVector arg2ColVector = (BytesColumnVector) batch.cols[arg2Column]; - BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumn]; - int[] sel = batch.selected; - boolean[] outputIsNull = outputColVector.isNull; - outputColVector.noNulls = arg2ColVector.noNulls; - outputColVector.isRepeating = false; // may override later - int n = batch.size; - long[] vector1 = arg1ColVector.vector; - - // return immediately if batch is empty - if (n == 0) { - return; - } - - outputColVector.initBuffer(); - - /* All the code paths below propagate nulls even if arg2 has no nulls. - * This is to reduce the number of code paths and shorten the - * code, at the expense of maybe doing unnecessary work if neither input - * has nulls. This could be improved in the future by expanding the number - * of code paths. 
- */ - if (arg1ColVector.isRepeating) { - if (vector1[0] == 1) { - arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); - } else { - outputColVector.fill(arg3Scalar); - } - return; - } - - // extend any repeating values and noNulls indicator in the inputs - arg2ColVector.flatten(batch.selectedInUse, sel, n); - - if (arg1ColVector.noNulls) { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); - } - outputIsNull[i] = (vector1[i] == 1 ? arg2ColVector.isNull[i] : false); - } - } else { - for(int i = 0; i != n; i++) { - if (vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); - } - outputIsNull[i] = (vector1[i] == 1 ? arg2ColVector.isNull[i] : false); - } - } - } else /* there are nulls */ { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (!arg1ColVector.isNull[i] && vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); - } - outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? - arg2ColVector.isNull[i] : false); - } - } else { - for(int i = 0; i != n; i++) { - if (!arg1ColVector.isNull[i] && vector1[i] == 1) { - if (!arg2ColVector.isNull[i]) { - outputColVector.setVal( - i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); - } - } else { - outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); - } - outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? 
- arg2ColVector.isNull[i] : false); - } - } - } - - // restore state of repeating and non nulls indicators - arg2ColVector.unFlatten(); - } - - @Override - public int getOutputColumn() { - return outputColumn; - } - - @Override - public String getOutputType() { - return "String"; - } - - public int getArg1Column() { - return arg1Column; - } - - public void setArg1Column(int colNum) { - this.arg1Column = colNum; - } - - public int getArg2Column() { - return arg2Column; - } - - public void setArg2Column(int colNum) { - this.arg2Column = colNum; - } - - public byte[] getArg3Scalar() { - return arg3Scalar; - } - - public void setArg3Scalar(byte[] value) { - this.arg3Scalar = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - @Override - public VectorExpressionDescriptor.Descriptor getDescriptor() { - return (new VectorExpressionDescriptor.Builder()) - .setMode( - VectorExpressionDescriptor.Mode.PROJECTION) - .setNumArguments(3) - .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("long"), - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) - .setInputExpressionTypes( - VectorExpressionDescriptor.InputExpressionType.COLUMN, - VectorExpressionDescriptor.InputExpressionType.COLUMN, - VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); - } -} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnCharScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnCharScalar.java new file mode 100644 index 0000000..46762f4 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnCharScalar.java @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveChar; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string column expression. + * The third is a string scalar. 
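+ * (This subclass covers the case where the scalar is typed CHAR; the value is
+ * converted to bytes once at construction time.)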
+ */ +public class IfExprStringGroupColumnCharScalar extends IfExprStringGroupColumnStringScalar { + + private static final long serialVersionUID = 1L; + + public IfExprStringGroupColumnCharScalar(int arg1Column, int arg2Column, HiveChar arg3Scalar, int outputColumn) { + super(arg1Column, arg2Column, arg3Scalar.getValue().getBytes(), outputColumn); + } + + public IfExprStringGroupColumnCharScalar() { + super(); + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.CHAR) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnStringGroupColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnStringGroupColumn.java new file mode 100644 index 0000000..c0c639b --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnStringGroupColumn.java @@ -0,0 +1,194 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input column expressions. + * The first is always a boolean (LongColumnVector). + * The second and third are string columns or string expression results. 
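+ * (Any string-group type, i.e. STRING, CHAR or VARCHAR, is accepted for the
+ * second and third arguments; all are carried in a BytesColumnVector.)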
+ */ +public class IfExprStringGroupColumnStringGroupColumn extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int arg1Column, arg2Column, arg3Column; + private int outputColumn; + + public IfExprStringGroupColumnStringGroupColumn(int arg1Column, int arg2Column, int arg3Column, int outputColumn) { + this.arg1Column = arg1Column; + this.arg2Column = arg2Column; + this.arg3Column = arg3Column; + this.outputColumn = outputColumn; + } + + public IfExprStringGroupColumnStringGroupColumn() { + super(); + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column]; + BytesColumnVector arg2ColVector = (BytesColumnVector) batch.cols[arg2Column]; + BytesColumnVector arg3ColVector = (BytesColumnVector) batch.cols[arg3Column]; + BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] outputIsNull = outputColVector.isNull; + outputColVector.noNulls = arg2ColVector.noNulls && arg3ColVector.noNulls; + outputColVector.isRepeating = false; // may override later + int n = batch.size; + long[] vector1 = arg1ColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + outputColVector.initBuffer(); + + /* All the code paths below propagate nulls even if neither arg2 nor arg3 + * have nulls. This is to reduce the number of code paths and shorten the + * code, at the expense of maybe doing unnecessary work if neither input + * has nulls. This could be improved in the future by expanding the number + * of code paths. + */ + if (arg1ColVector.isRepeating) { + if (vector1[0] == 1) { + arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); + } else { + arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); + } + return; + } + + // extend any repeating values and noNulls indicator in the inputs + arg2ColVector.flatten(batch.selectedInUse, sel, n); + arg3ColVector.flatten(batch.selectedInUse, sel, n); + + if (arg1ColVector.noNulls) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (vector1[i] == 1 ? + arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); + } + } else { + for(int i = 0; i != n; i++) { + if (vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (vector1[i] == 1 ? 
+ arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); + } + } + } else /* there are nulls */ { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (!arg1ColVector.isNull[i] && vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? + arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); + } + } else { + for(int i = 0; i != n; i++) { + if (!arg1ColVector.isNull[i] && vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? + arg2ColVector.isNull[i] : arg3ColVector.isNull[i]); + } + } + } + arg2ColVector.unFlatten(); + arg3ColVector.unFlatten(); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnStringScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnStringScalar.java new file mode 100644 index 0000000..744bfa5 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnStringScalar.java @@ -0,0 +1,181 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string column expression. + * The third is a string scalar. + */ +public class IfExprStringGroupColumnStringScalar extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int arg1Column, arg2Column; + private byte[] arg3Scalar; + private int outputColumn; + + public IfExprStringGroupColumnStringScalar(int arg1Column, int arg2Column, byte[] arg3Scalar, int outputColumn) { + this.arg1Column = arg1Column; + this.arg2Column = arg2Column; + this.arg3Scalar = arg3Scalar; + this.outputColumn = outputColumn; + } + + public IfExprStringGroupColumnStringScalar() { + super(); + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column]; + BytesColumnVector arg2ColVector = (BytesColumnVector) batch.cols[arg2Column]; + BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] outputIsNull = outputColVector.isNull; + outputColVector.noNulls = arg2ColVector.noNulls; + outputColVector.isRepeating = false; // may override later + int n = batch.size; + long[] vector1 = arg1ColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + outputColVector.initBuffer(); + + /* All the code paths below propagate nulls even if arg2 has no nulls. + * This is to reduce the number of code paths and shorten the + * code, at the expense of maybe doing unnecessary work if neither input + * has nulls. This could be improved in the future by expanding the number + * of code paths. + */ + if (arg1ColVector.isRepeating) { + if (vector1[0] == 1) { + arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); + } else { + outputColVector.fill(arg3Scalar); + } + return; + } + + // extend any repeating values and noNulls indicator in the inputs + arg2ColVector.flatten(batch.selectedInUse, sel, n); + + if (arg1ColVector.noNulls) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); + } + outputIsNull[i] = (vector1[i] == 1 ? arg2ColVector.isNull[i] : false); + } + } else { + for(int i = 0; i != n; i++) { + if (vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); + } + outputIsNull[i] = (vector1[i] == 1 ? 
arg2ColVector.isNull[i] : false); + } + } + } else /* there are nulls */ { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (!arg1ColVector.isNull[i] && vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); + } + outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? + arg2ColVector.isNull[i] : false); + } + } else { + for(int i = 0; i != n; i++) { + if (!arg1ColVector.isNull[i] && vector1[i] == 1) { + if (!arg2ColVector.isNull[i]) { + outputColVector.setVal( + i, arg2ColVector.vector[i], arg2ColVector.start[i], arg2ColVector.length[i]); + } + } else { + outputColVector.setRef(i, arg3Scalar, 0, arg3Scalar.length); + } + outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? + arg2ColVector.isNull[i] : false); + } + } + } + + // restore state of repeating and noNulls indicators + arg2ColVector.unFlatten(); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.STRING) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnVarCharScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnVarCharScalar.java new file mode 100644 index 0000000..f0bf76e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringGroupColumnVarCharScalar.java @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveVarchar; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string column expression. + * The third is a varchar scalar.
+ */ +public class IfExprStringGroupColumnVarCharScalar extends IfExprStringGroupColumnStringScalar { + + private static final long serialVersionUID = 1L; + + public IfExprStringGroupColumnVarCharScalar(int arg1Column, int arg2Column, HiveVarchar arg3Scalar, int outputColumn) { + super(arg1Column, arg2Column, arg3Scalar.getValue().getBytes(), outputColumn); + } + + public IfExprStringGroupColumnVarCharScalar() { + super(); + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.VARCHAR) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarCharScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarCharScalar.java new file mode 100644 index 0000000..9707027 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarCharScalar.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveChar; + +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string scalar. + * The third is a char scalar.
+ */ +public class IfExprStringScalarCharScalar extends IfExprStringScalarStringScalar { + + private static final long serialVersionUID = 1L; + + public IfExprStringScalarCharScalar( + int arg1Column, byte[] arg2Scalar, HiveChar arg3Scalar, int outputColumn) { + super(arg1Column, arg2Scalar, arg3Scalar.getValue().getBytes(), outputColumn); + } + + public IfExprStringScalarCharScalar() { + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.CHAR) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringColumn.java deleted file mode 100644 index 562db72..0000000 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringColumn.java +++ /dev/null @@ -1,208 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.exec.vector.expressions; - -import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; -import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; -import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; -import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; - -/** - * Compute IF(expr1, expr2, expr3) for 3 input column expressions. - * The first is always a boolean (LongColumnVector). - * The second is a string scalar. - * The third is a string column or non-constant expression result. 
- */ -public class IfExprStringScalarStringColumn extends VectorExpression { - - private static final long serialVersionUID = 1L; - - private int arg1Column, arg3Column; - private byte[] arg2Scalar; - private int outputColumn; - - public IfExprStringScalarStringColumn(int arg1Column, byte[] arg2Scalar, int arg3Column, int outputColumn) { - this.arg1Column = arg1Column; - this.arg2Scalar = arg2Scalar; - this.arg3Column = arg3Column; - this.outputColumn = outputColumn; - } - - public IfExprStringScalarStringColumn() { - } - - @Override - public void evaluate(VectorizedRowBatch batch) { - - if (childExpressions != null) { - super.evaluateChildren(batch); - } - - LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column]; - BytesColumnVector arg3ColVector = (BytesColumnVector) batch.cols[arg3Column]; - BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumn]; - int[] sel = batch.selected; - boolean[] outputIsNull = outputColVector.isNull; - outputColVector.noNulls = arg3ColVector.noNulls; - outputColVector.isRepeating = false; // may override later - int n = batch.size; - long[] vector1 = arg1ColVector.vector; - - // return immediately if batch is empty - if (n == 0) { - return; - } - - outputColVector.initBuffer(); - - /* All the code paths below propagate nulls even arg3 has no - * nulls. This is to reduce the number of code paths and shorten the - * code, at the expense of maybe doing unnecessary work if neither input - * has nulls. This could be improved in the future by expanding the number - * of code paths. - */ - if (arg1ColVector.isRepeating) { - if (vector1[0] == 1) { - outputColVector.fill(arg2Scalar); - } else { - arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); - } - return; - } - - // extend any repeating values and noNulls indicator in the input - arg3ColVector.flatten(batch.selectedInUse, sel, n); - - if (arg1ColVector.noNulls) { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (vector1[i] == 1) { - outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (vector1[i] == 1 ? false : arg3ColVector.isNull[i]); - } - } else { - for(int i = 0; i != n; i++) { - if (vector1[i] == 1) { - outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (vector1[i] == 1 ? false : arg3ColVector.isNull[i]); - } - } - } else /* there are nulls */ { - if (batch.selectedInUse) { - for(int j = 0; j != n; j++) { - int i = sel[j]; - if (!arg1ColVector.isNull[i] && vector1[i] == 1) { - outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? 
- false : arg3ColVector.isNull[i]); - } - } else { - for(int i = 0; i != n; i++) { - if (!arg1ColVector.isNull[i] && vector1[i] == 1) { - outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); - } else { - if (!arg3ColVector.isNull[i]) { - outputColVector.setVal( - i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); - } - } - outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? - false : arg3ColVector.isNull[i]); - } - } - } - - // restore state of repeating and non nulls indicators - arg3ColVector.unFlatten(); - } - - @Override - public int getOutputColumn() { - return outputColumn; - } - - @Override - public String getOutputType() { - return "String"; - } - - public int getArg1Column() { - return arg1Column; - } - - public void setArg1Column(int colNum) { - this.arg1Column = colNum; - } - - public byte[] getArg2Scalar() { - return arg2Scalar; - } - - public void setArg2Scalar(byte[] value) { - this.arg2Scalar = value; - } - - public int getArg3Column() { - return arg3Column; - } - - public void setArg3Column(int colNum) { - this.arg3Column = colNum; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - - @Override - public VectorExpressionDescriptor.Descriptor getDescriptor() { - return (new VectorExpressionDescriptor.Builder()) - .setMode( - VectorExpressionDescriptor.Mode.PROJECTION) - .setNumArguments(3) - .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("long"), - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) - .setInputExpressionTypes( - VectorExpressionDescriptor.InputExpressionType.COLUMN, - VectorExpressionDescriptor.InputExpressionType.SCALAR, - VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); - } -} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringGroupColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringGroupColumn.java new file mode 100644 index 0000000..151ac3e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringGroupColumn.java @@ -0,0 +1,181 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; +import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; +import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input column expressions. 
+ * The first is always a boolean (LongColumnVector). + * The second is a string scalar. + * The third is a string column or non-constant expression result. + */ +public class IfExprStringScalarStringGroupColumn extends VectorExpression { + + private static final long serialVersionUID = 1L; + + private int arg1Column, arg3Column; + private byte[] arg2Scalar; + private int outputColumn; + + public IfExprStringScalarStringGroupColumn(int arg1Column, byte[] arg2Scalar, int arg3Column, int outputColumn) { + this.arg1Column = arg1Column; + this.arg2Scalar = arg2Scalar; + this.arg3Column = arg3Column; + this.outputColumn = outputColumn; + } + + public IfExprStringScalarStringGroupColumn() { + super(); + } + + @Override + public void evaluate(VectorizedRowBatch batch) { + + if (childExpressions != null) { + super.evaluateChildren(batch); + } + + LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column]; + BytesColumnVector arg3ColVector = (BytesColumnVector) batch.cols[arg3Column]; + BytesColumnVector outputColVector = (BytesColumnVector) batch.cols[outputColumn]; + int[] sel = batch.selected; + boolean[] outputIsNull = outputColVector.isNull; + outputColVector.noNulls = arg3ColVector.noNulls; + outputColVector.isRepeating = false; // may override later + int n = batch.size; + long[] vector1 = arg1ColVector.vector; + + // return immediately if batch is empty + if (n == 0) { + return; + } + + outputColVector.initBuffer(); + + /* All the code paths below propagate nulls even if arg3 has no + * nulls. This is to reduce the number of code paths and shorten the + * code, at the expense of maybe doing unnecessary work if neither input + * has nulls. This could be improved in the future by expanding the number + * of code paths. + */ + if (arg1ColVector.isRepeating) { + if (vector1[0] == 1) { + outputColVector.fill(arg2Scalar); + } else { + arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector); + } + return; + } + + // extend any repeating values and noNulls indicator in the input + arg3ColVector.flatten(batch.selectedInUse, sel, n); + + if (arg1ColVector.noNulls) { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (vector1[i] == 1) { + outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (vector1[i] == 1 ? false : arg3ColVector.isNull[i]); + } + } else { + for(int i = 0; i != n; i++) { + if (vector1[i] == 1) { + outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (vector1[i] == 1 ? false : arg3ColVector.isNull[i]); + } + } + } else /* there are nulls */ { + if (batch.selectedInUse) { + for(int j = 0; j != n; j++) { + int i = sel[j]; + if (!arg1ColVector.isNull[i] && vector1[i] == 1) { + outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+ false : arg3ColVector.isNull[i]); + } + } else { + for(int i = 0; i != n; i++) { + if (!arg1ColVector.isNull[i] && vector1[i] == 1) { + outputColVector.setRef(i, arg2Scalar, 0, arg2Scalar.length); + } else { + if (!arg3ColVector.isNull[i]) { + outputColVector.setVal( + i, arg3ColVector.vector[i], arg3ColVector.start[i], arg3ColVector.length[i]); + } + } + outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ? + false : arg3ColVector.isNull[i]); + } + } + } + + // restore state of repeating and non nulls indicators + arg3ColVector.unFlatten(); + } + + @Override + public int getOutputColumn() { + return outputColumn; + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringScalar.java index f6fcfea..6ca3af1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringScalar.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarStringScalar.java @@ -132,34 +132,6 @@ public String getOutputType() { return "String"; } - public int getArg1Column() { - return arg1Column; - } - - public void setArg1Column(int colNum) { - this.arg1Column = colNum; - } - - public byte[] getArg2Scalar() { - return arg2Scalar; - } - - public void setArg2Scalar(byte[] value) { - this.arg2Scalar = value; - } - - public byte[] getArg3Scalar() { - return arg3Scalar; - } - - public void setArg3Scalar(byte[] value) { - this.arg3Scalar = value; - } - - public void setOutputColumn(int outputColumn) { - this.outputColumn = outputColumn; - } - @Override public VectorExpressionDescriptor.Descriptor getDescriptor() { return (new VectorExpressionDescriptor.Builder()) @@ -167,9 +139,9 @@ public void setOutputColumn(int outputColumn) { VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(3) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.getType("long"), - VectorExpressionDescriptor.ArgumentType.getType("string"), - VectorExpressionDescriptor.ArgumentType.getType("string")) + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.STRING) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN, VectorExpressionDescriptor.InputExpressionType.SCALAR, diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarVarCharScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarVarCharScalar.java new file mode 100644 index 0000000..86324b9 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprStringScalarVarCharScalar.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license 
agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveVarchar; + +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input expressions. + * The first is always a boolean (LongColumnVector). + * The second is a string scalar. + * The third is a varchar scalar. + */ +public class IfExprStringScalarVarCharScalar extends IfExprStringScalarStringScalar { + + private static final long serialVersionUID = 1L; + + public IfExprStringScalarVarCharScalar( + int arg1Column, byte[] arg2Scalar, HiveVarchar arg3Scalar, int outputColumn) { + super(arg1Column, arg2Scalar, arg3Scalar.getValue().getBytes(), outputColumn); + } + + public IfExprStringScalarVarCharScalar() { + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.VARCHAR) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprVarCharScalarStringGroupColumn.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprVarCharScalarStringGroupColumn.java new file mode 100644 index 0000000..0664b15 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprVarCharScalarStringGroupColumn.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveVarchar; + +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input column expressions. + * The first is always a boolean (LongColumnVector). + * The second is a varchar scalar. + * The third is a string column or non-constant expression result. + */ +public class IfExprVarCharScalarStringGroupColumn extends IfExprStringScalarStringGroupColumn { + + private static final long serialVersionUID = 1L; + + public IfExprVarCharScalarStringGroupColumn(int arg1Column, HiveVarchar arg2Scalar, int arg3Column, int outputColumn) { + super(arg1Column, arg2Scalar.getValue().getBytes(), arg3Column, outputColumn); + } + + public IfExprVarCharScalarStringGroupColumn() { + super(); + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.VARCHAR, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprVarCharScalarStringScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprVarCharScalarStringScalar.java new file mode 100644 index 0000000..11844d5 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprVarCharScalarStringScalar.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +import org.apache.hadoop.hive.common.type.HiveVarchar; + +import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor; + +/** + * Compute IF(expr1, expr2, expr3) for 3 input expressions. + * The first is always a boolean (LongColumnVector). + * The second is a varchar scalar. + * The third is a string scalar.
+ */ +public class IfExprVarCharScalarStringScalar extends IfExprStringScalarStringScalar { + + private static final long serialVersionUID = 1L; + + public IfExprVarCharScalarStringScalar( + int arg1Column, HiveVarchar arg2Scalar, byte[] arg3Scalar, int outputColumn) { + super(arg1Column, arg2Scalar.getValue().getBytes(), arg3Scalar, outputColumn); + } + + public IfExprVarCharScalarStringScalar() { + } + + @Override + public String getOutputType() { + return "String"; + } + + @Override + public VectorExpressionDescriptor.Descriptor getDescriptor() { + return (new VectorExpressionDescriptor.Builder()) + .setMode( + VectorExpressionDescriptor.Mode.PROJECTION) + .setNumArguments(3) + .setArgumentTypes( + VectorExpressionDescriptor.ArgumentType.LONG, + VectorExpressionDescriptor.ArgumentType.VARCHAR, + VectorExpressionDescriptor.ArgumentType.STRING) + .setInputExpressionTypes( + VectorExpressionDescriptor.InputExpressionType.COLUMN, + VectorExpressionDescriptor.InputExpressionType.SCALAR, + VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColCol.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColCol.java index e6c2c23..ba9cab1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColCol.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColCol.java @@ -446,8 +446,8 @@ public void setOutputColumn(int outputColumn) { VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(2) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING, - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN, VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
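The one-line descriptor changes in the concat and substring expressions here all repeat the same pattern: an argument widened from STRING to STRING_GROUP, so a single vectorized class can bind to STRING, CHAR, and VARCHAR inputs alike. A minimal sketch of that pattern, using only the builder calls that appear in this patch (the mode, argument count, and input kinds vary per expression; the two-argument projection shown is illustrative, not any one class's actual descriptor):

VectorExpressionDescriptor.Descriptor descriptor =
    (new VectorExpressionDescriptor.Builder())
        .setMode(VectorExpressionDescriptor.Mode.PROJECTION)
        .setNumArguments(2)
        .setArgumentTypes(
            // STRING_GROUP is intended to match any string-family argument type.
            VectorExpressionDescriptor.ArgumentType.STRING_GROUP,
            VectorExpressionDescriptor.ArgumentType.STRING_GROUP)
        .setInputExpressionTypes(
            VectorExpressionDescriptor.InputExpressionType.COLUMN,
            VectorExpressionDescriptor.InputExpressionType.SCALAR)
        .build();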
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColScalar.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColScalar.java index 08b0eef..6110d47 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColScalar.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatColScalar.java @@ -155,8 +155,8 @@ public void setOutputColumn(int outputColumn) { VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(2) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING, - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN, VectorExpressionDescriptor.InputExpressionType.SCALAR).build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatScalarCol.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatScalarCol.java index 63bb186..5b958b3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatScalarCol.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringConcatScalarCol.java @@ -155,8 +155,8 @@ public void setOutputColumn(int outputColumn) { VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(2) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING, - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.SCALAR, VectorExpressionDescriptor.InputExpressionType.COLUMN).build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringExpr.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringExpr.java index 92beb93..0e7404f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringExpr.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringExpr.java @@ -18,6 +18,10 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions; +import java.util.Arrays; + +import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; + /** * String expression evaluation helper functions. */ @@ -32,6 +36,7 @@ */ public static int compare(byte[] arg1, int start1, int len1, byte[] arg2, int start2, int len2) { for (int i = 0; i < len1 && i < len2; i++) { + // Note the "& 0xff" widens each signed byte to an int holding its unsigned value (0-255), so the comparison is unsigned. int b1 = arg1[i + start1] & 0xff; int b2 = arg2[i + start2] & 0xff; if (b1 != b2) { @@ -40,4 +45,265 @@ public static int compare(byte[] arg1, int start1, int len1, byte[] arg2, int st } return len1 - len2; } + + // A setVal with the same function signature as rightTrim, leftTrim, truncate, etc, below. + // Useful for class generation via templates. + public static void assign(BytesColumnVector outV, int i, byte[] bytes, int start, int length) { + // set output vector + outV.setVal(i, bytes, start, length); + } + + /* + * Left trim a byte slice and return the new byte start position. + * Ignore null handling. That will be handled separately. + */ + public static int leftTrim(byte[] bytes, int start, int length) { + int end = start + length; + + // skip past blank characters + int j = start; + while(j < end && bytes[j] == 0x20) { + j++; + } + + return j; + } + + /* + * Left trim element i of the vector and set the result by value in outV. + * Ignore null handling. That will be handled separately. + */ + public static void leftTrim(BytesColumnVector outV, int i, byte[] bytes, int start, int length) { + int end = start + length; + + // skip past blank characters + int j = start; + while(j < end && bytes[j] == 0x20) { + j++; + } + + outV.setVal(i, bytes, j, length - (j - start)); + }
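+ + // Illustrative worked example (hypothetical bytes, not part of the patch): for the + // 4-byte value { blank, 'h', 'i', blank }, leftTrim(bytes, 0, 4) returns a new start of 1 + // and rightTrim(bytes, 0, 4) below returns a new length of 3, so applying both trims + // leaves the 2-byte value "hi".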
+ + /* + * Right trim a byte slice and return the new byte length. + * Ignore null handling. That will be handled separately. + */ + public static int rightTrim(byte[] bytes, int start, int length) { + // skip trailing blank characters + int j = start + length - 1; + while(j >= start && bytes[j] == 0x20) { + j--; + } + + return (j - start) + 1; + } + + /* + * Right trim element i of the vector and place the result by value in outV. + * Ignore null handling. That will be handled separately. + */ + public static void rightTrim(BytesColumnVector outV, int i, byte[] bytes, int start, int length) { + // skip trailing blank characters + int j = start + length - 1; + while(j >= start && bytes[j] == 0x20) { + j--; + } + + // set output vector + outV.setVal(i, bytes, start, (j - start) + 1); + } + + /* + * Truncate a byte slice to a maximum number of characters and + * return the new byte length. + * Ignore null handling. That will be handled separately. + */ + public static int truncate(byte[] bytes, int start, int length, int maxLength) { + int end = start + length; + + // count characters forward + int j = start; + int charCount = 0; + while(j < end) { + // UTF-8 continuation bytes have 2 high bits equal to 0x80. + if ((bytes[j] & 0xc0) != 0x80) { + if (charCount == maxLength) { + break; + } + ++charCount; + } + j++; + } + return (j - start); + } + + /* + * Truncate element i of the vector to a maximum number of characters and + * place the result by value in outV. + * Ignore null handling. That will be handled separately. + */ + public static void truncate(BytesColumnVector outV, int i, byte[] bytes, int start, int length, int maxLength) { + int end = start + length; + + // count characters forward + int j = start; + int charCount = 0; + while(j < end) { + // UTF-8 continuation bytes have 2 high bits equal to 0x80. + if ((bytes[j] & 0xc0) != 0x80) { + if (charCount == maxLength) { + break; + } + ++charCount; + } + j++; + } + + // set output vector + outV.setVal(i, bytes, start, (j - start)); + } + + /* + * Truncate a scalar byte array to a maximum number of characters and + * return the truncated bytes. + */ + public static byte[] truncateScalar(byte[] bytes, int maxLength) { + int end = bytes.length; + + // count characters forward + int j = 0; + int charCount = 0; + while(j < end) { + // UTF-8 continuation bytes have 2 high bits equal to 0x80. + if ((bytes[j] & 0xc0) != 0x80) { + if (charCount == maxLength) { + break; + } + ++charCount; + } + j++; + } + if (j == end) { + return bytes; + } else { + return Arrays.copyOf(bytes, j); + } + } + + /* + * Right trim and truncate a byte slice to a maximum number of characters and + * return the new byte length. + * Ignore null handling. That will be handled separately. + */ + public static int rightTrimAndTruncate(byte[] bytes, int start, int length, int maxLength) { + int end = start + length; + + // count characters forward and watch for final run of pads + int j = start; + int charCount = 0; + int padRunStart = -1; + while(j < end) { + // UTF-8 continuation bytes have 2 high bits equal to 0x80. + if ((bytes[j] & 0xc0) != 0x80) { + if (charCount == maxLength) { + break; + } + if (bytes[j] == 0x20) { + if (padRunStart == -1) { + padRunStart = j; + } + } else { + padRunStart = -1; + } + ++charCount; + } else { + padRunStart = -1; + } + j++; + } + if (padRunStart != -1) { + return (padRunStart - start); + } else { + return (j - start); + } + }
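+ + // Illustrative worked example (hypothetical values): with maxLength 3, calling + // rightTrimAndTruncate on the 5 bytes "abcde" returns 3 (keeping "abc"), while on + // "ab" followed by three pad blanks it returns 2 (keeping "ab"), because a trailing + // run of blanks is dropped rather than counted toward the kept length.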
+ + /* + * Right trim and truncate element i of the vector to a maximum number of characters and + * place the result by value in outV. + * Ignore null handling. That will be handled separately. + */ + public static void rightTrimAndTruncate(BytesColumnVector outV, int i, byte[] bytes, int start, int length, int maxLength) { + int end = start + length; + + // count characters forward and watch for final run of pads + int j = start; + int charCount = 0; + int padRunStart = -1; + while(j < end) { + // UTF-8 continuation bytes have 2 high bits equal to 0x80. + if ((bytes[j] & 0xc0) != 0x80) { + if (charCount == maxLength) { + break; + } + if (bytes[j] == 0x20) { + if (padRunStart == -1) { + padRunStart = j; + } + } else { + padRunStart = -1; + } + ++charCount; + } else { + padRunStart = -1; + } + j++; + } + // set output vector + if (padRunStart != -1) { + outV.setVal(i, bytes, start, (padRunStart - start)); + } else { + outV.setVal(i, bytes, start, (j - start) ); + } + } + + /* + * Right trim and truncate a scalar byte array to a maximum number of characters and + * return the trimmed and truncated scalar bytes. + */ + public static byte[] rightTrimAndTruncateScalar(byte[] bytes, int maxLength) { + int end = bytes.length; + + // count characters forward and watch for final run of pads + int j = 0; + int charCount = 0; + int padRunStart = -1; + while(j < end) { + // UTF-8 continuation bytes have 2 high bits equal to 0x80. + if ((bytes[j] & 0xc0) != 0x80) { + if (charCount == maxLength) { + break; + } + if (bytes[j] == 0x20) { + if (padRunStart == -1) { + padRunStart = j; + } + } else { + padRunStart = -1; + } + ++charCount; + } else { + padRunStart = -1; + } + j++; + } + if (padRunStart != -1) { + return Arrays.copyOf(bytes, padRunStart); + } else if (j == end) { + return bytes; + } else { + return Arrays.copyOf(bytes, j); + } + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringLength.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringLength.java index b1b915e..edecf79 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringLength.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringLength.java @@ -162,7 +162,7 @@ public void setOutputColumn(int outputColumn) { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStart.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStart.java index 057d4a3..6a41204 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStart.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStart.java @@ -250,7 +250,7 @@ public void setOutputColumn(int outputColumn) { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(2) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, VectorExpressionDescriptor.ArgumentType.LONG) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN, diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStartLen.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStartLen.java index 477dc67..3ba4e67 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStartLen.java +++
ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringSubstrColStartLen.java @@ -277,7 +277,7 @@ public void setOutputColumn(int outputColumn) { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(3) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING, + VectorExpressionDescriptor.ArgumentType.STRING_GROUP, VectorExpressionDescriptor.ArgumentType.LONG, VectorExpressionDescriptor.ArgumentType.LONG) .setInputExpressionTypes( diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDF.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDF.java index 7985ca0..46f17db 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDF.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDF.java @@ -200,7 +200,7 @@ public void setOutputColumn(int outputColumn) { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDFDirect.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDFDirect.java index 8050b59..7d11edb 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDFDirect.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/StringUnaryUDFDirect.java @@ -142,7 +142,7 @@ public String getOutputType() { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncStringOutput.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncStringOutput.java new file mode 100644 index 0000000..7cf1683 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/TruncStringOutput.java @@ -0,0 +1,25 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.vector.expressions; + +public interface TruncStringOutput { + abstract public int getMaxLength(); + + abstract public void setMaxLength(int maxLength); +} \ No newline at end of file diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java index c2bc012..a67cca3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java @@ -28,6 +28,7 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.common.type.Decimal128; +import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.ql.exec.vector.*; @@ -46,6 +47,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDateObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableDoubleObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableFloatObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveCharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveVarcharObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableIntObjectInspector; @@ -403,6 +405,9 @@ public static VectorExpressionWriter genVectorExpressionWritable( case STRING: return genVectorExpressionWritableString( (SettableStringObjectInspector) fieldObjInspector); + case CHAR: + return genVectorExpressionWritableChar( + (SettableHiveCharObjectInspector) fieldObjInspector); case VARCHAR: return genVectorExpressionWritableVarchar( (SettableHiveVarcharObjectInspector) fieldObjInspector); @@ -558,6 +563,46 @@ public Object initValue(Object ignored) { }.init(fieldObjInspector); } + private static VectorExpressionWriter genVectorExpressionWritableChar( + SettableHiveCharObjectInspector fieldObjInspector) throws HiveException { + return new VectorExpressionWriterBytes() { + private Object obj; + private Text text; + + public VectorExpressionWriter init(SettableHiveCharObjectInspector objInspector) + throws HiveException { + super.init(objInspector); + this.text = new Text(); + this.obj = initValue(null); + return this; + } + + @Override + public Object writeValue(byte[] value, int start, int length) throws HiveException { + text.set(value, start, length); + ((SettableHiveCharObjectInspector) this.objectInspector).set(this.obj, text.toString()); + return this.obj; + } + + @Override + public Object setValue(Object field, byte[] value, int start, int length) + throws HiveException { + if (null == field) { + field = initValue(null); + } + text.set(value, start, length); + ((SettableHiveCharObjectInspector) this.objectInspector).set(field, text.toString()); + return field; + } + + @Override + public Object initValue(Object ignored) { + return ((SettableHiveCharObjectInspector) this.objectInspector) + .create(new HiveChar(StringUtils.EMPTY, -1)); + } + }.init(fieldObjInspector); + } + private static VectorExpressionWriter genVectorExpressionWritableVarchar( 
SettableHiveVarcharObjectInspector fieldObjInspector) throws HiveException { return new VectorExpressionWriterBytes() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java index 2a8c6e9..d1fd674 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java @@ -181,7 +181,7 @@ public void setOutputColumn(int outputColumn) { b.setMode(VectorExpressionDescriptor.Mode.PROJECTION) .setNumArguments(1) .setArgumentTypes( - VectorExpressionDescriptor.ArgumentType.STRING) + VectorExpressionDescriptor.ArgumentType.STRING_GROUP) .setInputExpressionTypes( VectorExpressionDescriptor.InputExpressionType.COLUMN); return b.build(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java index fb91b9f..f98103a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.vector.*; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory; @@ -30,8 +31,12 @@ import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.serde2.io.DateWritable; +import org.apache.hadoop.hive.serde2.io.HiveCharWritable; +import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.*; +import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo; import org.apache.hadoop.io.Text; /** @@ -215,6 +220,35 @@ private void setOutputCol(ColumnVector colVec, int i, Object value) { t = ((WritableStringObjectInspector) outputOI).getPrimitiveWritableObject(value); } bv.setVal(i, t.getBytes(), 0, t.getLength()); + } else if (outputOI instanceof WritableHiveCharObjectInspector) { + WritableHiveCharObjectInspector writableHiveCharObjectOI = (WritableHiveCharObjectInspector) outputOI; + int maxLength = ((CharTypeInfo) writableHiveCharObjectOI.getTypeInfo()).getLength(); + BytesColumnVector bv = (BytesColumnVector) colVec; + + HiveCharWritable hiveCharWritable; + if (value instanceof HiveCharWritable) { + hiveCharWritable = ((HiveCharWritable) value); + } else { + hiveCharWritable = writableHiveCharObjectOI.getPrimitiveWritableObject(value); + } + Text t = hiveCharWritable.getTextValue(); + + // In vector mode, we stored CHAR as unpadded. 
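+ // Illustrative example: a CHAR(5) value "ab" arrives from the row-mode object as + // padded text ("ab" plus three trailing blanks), and the call below strips that + // padding so only the two significant bytes land in the BytesColumnVector.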
+ StringExpr.rightTrimAndTruncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength); + } else if (outputOI instanceof WritableHiveVarcharObjectInspector) { + WritableHiveVarcharObjectInspector writableHiveVarcharObjectOI = (WritableHiveVarcharObjectInspector) outputOI; + int maxLength = ((VarcharTypeInfo) writableHiveVarcharObjectOI.getTypeInfo()).getLength(); + BytesColumnVector bv = (BytesColumnVector) colVec; + + HiveVarcharWritable hiveVarcharWritable; + if (value instanceof HiveVarcharWritable) { + hiveVarcharWritable = ((HiveVarcharWritable) value); + } else { + hiveVarcharWritable = writableHiveVarcharObjectOI.getPrimitiveWritableObject(value); + } + Text t = hiveVarcharWritable.getTextValue(); + + StringExpr.truncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength); } else if (outputOI instanceof WritableIntObjectInspector) { LongColumnVector lv = (LongColumnVector) colVec; if (value instanceof Integer) { diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java index 5f93798..397fafe 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue; @@ -907,13 +908,19 @@ void skipRows(long items) throws IOException { } } - private static class BinaryTreeReader extends TreeReader{ - private InStream stream; - private IntegerReader lengths = null; + private static class BinaryTreeReader extends BytesTreeReader { + BinaryTreeReader(Path path, int columnId, Configuration conf) { + super(path, columnId, conf); + } + } - private final LongColumnVector scratchlcv; + private static class BytesTreeReader extends TreeReader{ + protected InStream stream; + protected IntegerReader lengths = null; - BinaryTreeReader(Path path, int columnId, Configuration conf) { + protected final LongColumnVector scratchlcv; + + BytesTreeReader(Path path, int columnId, Configuration conf) { super(path, columnId, conf); scratchlcv = new LongColumnVector(); } @@ -983,7 +990,7 @@ Object nextVector(Object previousVector, long batchSize) throws IOException { // Read present/isNull stream super.nextVector(result, batchSize); - BytesColumnVectorUtil.setRefToOrcByteArrays(stream, lengths, scratchlcv, result, batchSize); + BytesColumnVectorUtil.readOrcByteArrays(stream, lengths, scratchlcv, result, batchSize); return result; } @@ -1376,12 +1383,13 @@ void skipRows(long items) throws IOException { } } + // This class collects together very similar methods for reading an ORC vector of byte arrays and + // creating the BytesColumnVector. + // private static class BytesColumnVectorUtil { - // This method has the common code for reading in bytes into a BytesColumnVector. - // It is used by the BINARY, STRING, CHAR, VARCHAR types. 
- public static void setRefToOrcByteArrays(InStream stream, IntegerReader lengths, LongColumnVector scratchlcv, - BytesColumnVector result, long batchSize) throws IOException { + private static byte[] commonReadByteArrays(InStream stream, IntegerReader lengths, LongColumnVector scratchlcv, + BytesColumnVector result, long batchSize) throws IOException { // Read lengths scratchlcv.isNull = result.isNull; // Notice we are replacing the isNull vector here... lengths.nextVector(scratchlcv, batchSize); @@ -1409,11 +1417,20 @@ public static void setRefToOrcByteArrays(InStream stream, IntegerReader lengths, } len -= bytesRead; offset += bytesRead; - } + } + + return allBytes; + } + + // This method has the common code for reading in bytes into a BytesColumnVector. + public static void readOrcByteArrays(InStream stream, IntegerReader lengths, LongColumnVector scratchlcv, + BytesColumnVector result, long batchSize) throws IOException { + + byte[] allBytes = commonReadByteArrays(stream, lengths, scratchlcv, result, batchSize); // Too expensive to figure out 'repeating' by comparisons. result.isRepeating = false; - offset = 0; + int offset = 0; if (!scratchlcv.isRepeating) { for (int i = 0; i < batchSize; i++) { if (!scratchlcv.isNull[i]) { @@ -1518,7 +1535,7 @@ Object nextVector(Object previousVector, long batchSize) throws IOException { // Read present/isNull stream super.nextVector(result, batchSize); - BytesColumnVectorUtil.setRefToOrcByteArrays(stream, lengths, scratchlcv, result, batchSize); + BytesColumnVectorUtil.readOrcByteArrays(stream, lengths, scratchlcv, result, batchSize); return result; } @@ -1734,6 +1751,42 @@ Object next(Object previous) throws IOException { result.enforceMaxLength(maxLength); return result; } + + @Override + Object nextVector(Object previousVector, long batchSize) throws IOException { + // Get the vector of strings from StringTreeReader, then make a 2nd pass to + // adjust down the length (right trim and truncate) if necessary. + BytesColumnVector result = (BytesColumnVector) super.nextVector(previousVector, batchSize); + + int adjustedDownLen; + if (result.isRepeating) { + if (result.noNulls || !result.isNull[0]) { + adjustedDownLen = StringExpr.rightTrimAndTruncate(result.vector[0], result.start[0], result.length[0], maxLength); + if (adjustedDownLen < result.length[0]) { + result.setRef(0, result.vector[0], result.start[0], adjustedDownLen); + } + } + } else { + if (result.noNulls){ + for (int i = 0; i < batchSize; i++) { + adjustedDownLen = StringExpr.rightTrimAndTruncate(result.vector[i], result.start[i], result.length[i], maxLength); + if (adjustedDownLen < result.length[i]) { + result.setRef(i, result.vector[i], result.start[i], adjustedDownLen); + } + } + } else { + for (int i = 0; i < batchSize; i++) { + if (!result.isNull[i]) { + adjustedDownLen = StringExpr.rightTrimAndTruncate(result.vector[i], result.start[i], result.length[i], maxLength); + if (adjustedDownLen < result.length[i]) { + result.setRef(i, result.vector[i], result.start[i], adjustedDownLen); + } + } + } + } + } + return result; + } } private static class VarcharTreeReader extends StringTreeReader { @@ -1762,6 +1815,42 @@ Object next(Object previous) throws IOException { result.enforceMaxLength(maxLength); return result; } + + @Override + Object nextVector(Object previousVector, long batchSize) throws IOException { + // Get the vector of strings from StringTreeReader, then make a 2nd pass to + // adjust down the length (truncate) if necessary. 
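+ // Note: unlike the CHAR reader above, VARCHAR keeps any trailing blanks the user + // stored, so this pass only truncates to maxLength and never right trims.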
+      BytesColumnVector result = (BytesColumnVector) super.nextVector(previousVector, batchSize);
+
+      int adjustedDownLen;
+      if (result.isRepeating) {
+        if (result.noNulls || !result.isNull[0]) {
+          adjustedDownLen = StringExpr.truncate(result.vector[0], result.start[0], result.length[0], maxLength);
+          if (adjustedDownLen < result.length[0]) {
+            result.setRef(0, result.vector[0], result.start[0], adjustedDownLen);
+          }
+        }
+      } else {
+        if (result.noNulls) {
+          for (int i = 0; i < batchSize; i++) {
+            adjustedDownLen = StringExpr.truncate(result.vector[i], result.start[i], result.length[i], maxLength);
+            if (adjustedDownLen < result.length[i]) {
+              result.setRef(i, result.vector[i], result.start[i], adjustedDownLen);
+            }
+          }
+        } else {
+          for (int i = 0; i < batchSize; i++) {
+            if (!result.isNull[i]) {
+              adjustedDownLen = StringExpr.truncate(result.vector[i], result.start[i], result.length[i], maxLength);
+              if (adjustedDownLen < result.length[i]) {
+                result.setRef(i, result.vector[i], result.start[i], adjustedDownLen);
+              }
+            }
+          }
+        }
+      }
+      return result;
+    }
   }
 
   private static class StructTreeReader extends TreeReader {
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 76b4d03..afaa550 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -1060,6 +1060,7 @@ void writeStripe(OrcProto.StripeFooter.Builder builder,
           (!isDirectV2) || (rows.size() > 0 &&
               (float)(dictionary.size()) / rows.size() <= dictionaryKeySizeThreshold);
 
+      useDictionaryEncoding = false; // Overrides the value computed above, disabling dictionary encoding unconditionally.
       final int[] dumpOrder = new int[dictionary.size()];
 
       if (useDictionaryEncoding) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index a622095..5a6ff6a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -156,6 +156,10 @@ public Vectorizer() {
     // The regex matches only the "decimal" prefix of the type.
     patternBuilder.append("|decimal.*");
 
+    // CHAR and VARCHAR types can be specified with maximum length.
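+    // The trailing ".*" accepts the optional maximum length suffix, e.g. "char(10)"
+    // and "varchar(20)", in the same way "decimal.*" above accepts precision/scale.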
+    patternBuilder.append("|char.*");
+    patternBuilder.append("|varchar.*");
+
     supportedDataTypesPattern = Pattern.compile(patternBuilder.toString());
 
     supportedGenericUDFs.add(GenericUDFOPPlus.class);
@@ -248,6 +252,8 @@ public Vectorizer() {
     supportedGenericUDFs.add(GenericUDFTimestamp.class);
     supportedGenericUDFs.add(GenericUDFToDecimal.class);
     supportedGenericUDFs.add(GenericUDFToDate.class);
+    supportedGenericUDFs.add(GenericUDFToChar.class);
+    supportedGenericUDFs.add(GenericUDFToVarchar.class);
 
     // For conditional expressions
     supportedGenericUDFs.add(GenericUDFIf.class);
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
index adf55c8..6ca6286 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
@@ -41,10 +41,20 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarDoubleScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringScalar;
+
 
 /**
  * IF(expr1,expr2,expr3)
@@ -60,8 +70,14 @@
   IfExprLongScalarDoubleColumn.class, IfExprDoubleScalarLongColumn.class,
   IfExprLongScalarLongScalar.class, IfExprDoubleScalarDoubleScalar.class,
   IfExprLongScalarDoubleScalar.class, IfExprDoubleScalarLongScalar.class,
-  IfExprStringColumnStringColumn.class, IfExprStringColumnStringScalar.class,
-  IfExprStringScalarStringColumn.class, IfExprStringScalarStringScalar.class
+  IfExprStringGroupColumnStringGroupColumn.class,
+  IfExprStringGroupColumnStringScalar.class,
+  IfExprStringGroupColumnCharScalar.class, IfExprStringGroupColumnVarCharScalar.class,
+  IfExprStringScalarStringGroupColumn.class,
+  IfExprCharScalarStringGroupColumn.class, IfExprVarCharScalarStringGroupColumn.class,
+  IfExprStringScalarStringScalar.class,
+  IfExprStringScalarCharScalar.class, IfExprStringScalarVarCharScalar.class,
+  IfExprCharScalarStringScalar.class, IfExprVarCharScalarStringScalar.class,
 })
 public class GenericUDFIf extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
index cf104d3..bf00d71 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
@@ -34,9 +34,14 @@
   DoubleColEqualLongScalar.class, DoubleColEqualDoubleScalar.class,
   LongScalarEqualLongColumn.class, LongScalarEqualDoubleColumn.class,
   DoubleScalarEqualLongColumn.class, DoubleScalarEqualDoubleColumn.class,
-  StringColEqualStringColumn.class, StringColEqualStringScalar.class,
-  StringScalarEqualStringColumn.class, FilterStringColEqualStringColumn.class,
-  FilterStringColEqualStringScalar.class, FilterStringScalarEqualStringColumn.class,
+  StringGroupColEqualStringGroupColumn.class, FilterStringGroupColEqualStringGroupColumn.class,
+  StringGroupColEqualStringScalar.class,
+  StringGroupColEqualVarCharScalar.class, StringGroupColEqualCharScalar.class,
+  StringScalarEqualStringGroupColumn.class,
+  VarCharScalarEqualStringGroupColumn.class, CharScalarEqualStringGroupColumn.class,
+  FilterStringGroupColEqualStringScalar.class, FilterStringScalarEqualStringGroupColumn.class,
+  FilterStringGroupColEqualVarCharScalar.class, FilterVarCharScalarEqualStringGroupColumn.class,
+  FilterStringGroupColEqualCharScalar.class, FilterCharScalarEqualStringGroupColumn.class,
   FilterLongColEqualLongColumn.class, FilterLongColEqualDoubleColumn.class,
   FilterDoubleColEqualLongColumn.class, FilterDoubleColEqualDoubleColumn.class,
   FilterLongColEqualLongScalar.class, FilterLongColEqualDoubleScalar.class,
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
index 9f8de39..fd612d0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
@@ -35,9 +35,14 @@
   DoubleColGreaterEqualLongScalar.class, DoubleColGreaterEqualDoubleScalar.class,
   LongScalarGreaterEqualLongColumn.class, LongScalarGreaterEqualDoubleColumn.class,
   DoubleScalarGreaterEqualLongColumn.class, DoubleScalarGreaterEqualDoubleColumn.class,
-  StringColGreaterEqualStringColumn.class, StringColGreaterEqualStringScalar.class,
-  StringScalarGreaterEqualStringColumn.class, FilterStringColGreaterEqualStringColumn.class,
-  FilterStringColGreaterEqualStringScalar.class, FilterStringScalarGreaterEqualStringColumn.class,
+  StringGroupColGreaterEqualStringGroupColumn.class, FilterStringGroupColGreaterEqualStringGroupColumn.class,
+  StringGroupColGreaterEqualStringScalar.class,
+  StringGroupColGreaterEqualVarCharScalar.class, StringGroupColGreaterEqualCharScalar.class,
+  StringScalarGreaterEqualStringGroupColumn.class,
+  VarCharScalarGreaterEqualStringGroupColumn.class, CharScalarGreaterEqualStringGroupColumn.class,
+  FilterStringGroupColGreaterEqualStringScalar.class, FilterStringScalarGreaterEqualStringGroupColumn.class,
+  FilterStringGroupColGreaterEqualVarCharScalar.class, FilterVarCharScalarGreaterEqualStringGroupColumn.class,
+  FilterStringGroupColGreaterEqualCharScalar.class, FilterCharScalarGreaterEqualStringGroupColumn.class,
   FilterLongColGreaterEqualLongColumn.class, FilterLongColGreaterEqualDoubleColumn.class,
   FilterDoubleColGreaterEqualLongColumn.class, FilterDoubleColGreaterEqualDoubleColumn.class,
   FilterLongColGreaterEqualLongScalar.class, FilterLongColGreaterEqualDoubleScalar.class,
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
index b6d4d56..e1add92 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
@@ -35,9 +35,14 @@
   DoubleColLessEqualLongScalar.class, DoubleColLessEqualDoubleScalar.class,
   LongScalarLessEqualLongColumn.class, LongScalarLessEqualDoubleColumn.class,
   DoubleScalarLessEqualLongColumn.class, DoubleScalarLessEqualDoubleColumn.class,
-  StringColLessEqualStringColumn.class, StringColLessEqualStringScalar.class,
-  StringScalarLessEqualStringColumn.class, FilterStringColLessEqualStringColumn.class,
-  FilterStringColLessEqualStringScalar.class, FilterStringScalarLessEqualStringColumn.class,
+  StringGroupColLessEqualStringGroupColumn.class, FilterStringGroupColLessEqualStringGroupColumn.class,
+  StringGroupColLessEqualStringScalar.class,
+  StringGroupColLessEqualVarCharScalar.class, StringGroupColLessEqualCharScalar.class,
+  StringScalarLessEqualStringGroupColumn.class,
+  VarCharScalarLessEqualStringGroupColumn.class, CharScalarLessEqualStringGroupColumn.class,
+  FilterStringGroupColLessEqualStringScalar.class, FilterStringScalarLessEqualStringGroupColumn.class,
+  FilterStringGroupColLessEqualVarCharScalar.class, FilterVarCharScalarLessEqualStringGroupColumn.class,
+  FilterStringGroupColLessEqualCharScalar.class, FilterCharScalarLessEqualStringGroupColumn.class,
   FilterLongColLessEqualLongColumn.class, FilterLongColLessEqualDoubleColumn.class,
   FilterDoubleColLessEqualLongColumn.class, FilterDoubleColLessEqualDoubleColumn.class,
   FilterLongColLessEqualLongScalar.class, FilterLongColLessEqualDoubleScalar.class,
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
index 3ef7b44..ff8cfe7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
@@ -35,9 +35,14 @@
   DoubleColGreaterLongScalar.class, DoubleColGreaterDoubleScalar.class,
   LongScalarGreaterLongColumn.class, LongScalarGreaterDoubleColumn.class,
   DoubleScalarGreaterLongColumn.class, DoubleScalarGreaterDoubleColumn.class,
-  StringColGreaterStringColumn.class, StringColGreaterStringScalar.class,
-  StringScalarGreaterStringColumn.class, FilterStringColGreaterStringColumn.class,
-  FilterStringColGreaterStringScalar.class, FilterStringScalarGreaterStringColumn.class,
+  StringGroupColGreaterStringGroupColumn.class, FilterStringGroupColGreaterStringGroupColumn.class,
+  StringGroupColGreaterStringScalar.class,
+  StringGroupColGreaterVarCharScalar.class, StringGroupColGreaterCharScalar.class,
+  StringScalarGreaterStringGroupColumn.class,
+  VarCharScalarGreaterStringGroupColumn.class, CharScalarGreaterStringGroupColumn.class,
+  FilterStringGroupColGreaterStringScalar.class, FilterStringScalarGreaterStringGroupColumn.class,
+  FilterStringGroupColGreaterVarCharScalar.class, FilterVarCharScalarGreaterStringGroupColumn.class,
+  FilterStringGroupColGreaterCharScalar.class, FilterCharScalarGreaterStringGroupColumn.class,
   FilterLongColGreaterLongColumn.class, FilterLongColGreaterDoubleColumn.class,
   FilterDoubleColGreaterLongColumn.class, FilterDoubleColGreaterDoubleColumn.class,
   FilterLongColGreaterLongScalar.class, FilterLongColGreaterDoubleScalar.class,
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
index 27c983e..905dcc5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
@@ -35,9 +35,14 @@
   DoubleColLessLongScalar.class, DoubleColLessDoubleScalar.class,
   LongScalarLessLongColumn.class, LongScalarLessDoubleColumn.class,
   DoubleScalarLessLongColumn.class, DoubleScalarLessDoubleColumn.class,
-  StringColLessStringColumn.class, StringColLessStringScalar.class,
-  StringScalarLessStringColumn.class, FilterStringColLessStringColumn.class,
-  FilterStringColLessStringScalar.class, FilterStringScalarLessStringColumn.class,
+  StringGroupColLessStringGroupColumn.class, FilterStringGroupColLessStringGroupColumn.class,
+  StringGroupColLessStringScalar.class,
+  StringGroupColLessVarCharScalar.class, StringGroupColLessCharScalar.class,
+  StringScalarLessStringGroupColumn.class,
+  VarCharScalarLessStringGroupColumn.class, CharScalarLessStringGroupColumn.class,
+  FilterStringGroupColLessStringScalar.class, FilterStringScalarLessStringGroupColumn.class,
+  FilterStringGroupColLessVarCharScalar.class, FilterVarCharScalarLessStringGroupColumn.class,
+  FilterStringGroupColLessCharScalar.class, FilterCharScalarLessStringGroupColumn.class,
   FilterLongColLessLongColumn.class, FilterLongColLessDoubleColumn.class,
   FilterDoubleColLessLongColumn.class, FilterDoubleColLessDoubleColumn.class,
   FilterLongColLessLongScalar.class, FilterLongColLessDoubleScalar.class,
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
index d604cd5..ab2c159 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
@@ -34,9 +34,14 @@
   DoubleColNotEqualLongScalar.class, DoubleColNotEqualDoubleScalar.class,
   LongScalarNotEqualLongColumn.class, LongScalarNotEqualDoubleColumn.class,
   DoubleScalarNotEqualLongColumn.class, DoubleScalarNotEqualDoubleColumn.class,
-  StringColNotEqualStringColumn.class, StringColNotEqualStringScalar.class,
-  StringScalarNotEqualStringColumn.class, FilterStringColNotEqualStringColumn.class,
-  FilterStringColNotEqualStringScalar.class, FilterStringScalarNotEqualStringColumn.class,
+  StringGroupColNotEqualStringGroupColumn.class, FilterStringGroupColNotEqualStringGroupColumn.class,
+  StringGroupColNotEqualStringScalar.class,
+  StringGroupColNotEqualVarCharScalar.class, StringGroupColNotEqualCharScalar.class,
+  StringScalarNotEqualStringGroupColumn.class,
+  VarCharScalarNotEqualStringGroupColumn.class, CharScalarNotEqualStringGroupColumn.class,
+  FilterStringGroupColNotEqualStringScalar.class, FilterStringScalarNotEqualStringGroupColumn.class,
+  FilterStringGroupColNotEqualVarCharScalar.class, FilterVarCharScalarNotEqualStringGroupColumn.class,
+  FilterStringGroupColNotEqualCharScalar.class, FilterCharScalarNotEqualStringGroupColumn.class,
   FilterLongColNotEqualLongColumn.class, FilterLongColNotEqualDoubleColumn.class,
   FilterDoubleColNotEqualLongColumn.class, FilterDoubleColNotEqualDoubleColumn.class,
   FilterLongColNotEqualLongScalar.class, FilterLongColNotEqualDoubleScalar.class,
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
index 0612647..0c865dc 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
@@ -29,7 +29,9 @@
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColOrCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.DoubleColumnInList;
@@ -38,10 +40,14 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseLongToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNotNull;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNull;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColumnInList;
@@ -80,10 +86,14 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColumnBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColumnNotBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColGreaterStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColGreaterStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnNotBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterVarCharColumnBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterVarCharColumnNotBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterCharColumnBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterCharColumnNotBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
@@ -128,6 +138,8 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -289,11 +301,12 @@ public void testStringFilterExpressions() throws HiveException {
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
-    assertTrue(ve instanceof FilterStringColGreaterStringScalar);
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringScalar);
   }
 
   @Test
   public void testFilterStringColCompareStringColumnExpressions() throws HiveException {
+    // Strings test
     ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(String.class, "col1", "table", false);
     ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(String.class, "col2", "table", false);
 
@@ -313,7 +326,97 @@ public void testFilterStringColCompareStringColumnExpressions() throws HiveExcep
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
-    assertTrue(ve instanceof FilterStringColGreaterStringColumn);
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    // 2 CHAR test
+    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
+    col1Expr = new ExprNodeColumnDesc(charTypeInfo, "col1", "table", false);
+    col2Expr = new ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    // 2 VARCHAR test
+    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
+    col1Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    col2Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    // Some mix tests (STRING, CHAR), (VARCHAR, CHAR), (VARCHAR, STRING)...
+    col1Expr = new ExprNodeColumnDesc(String.class, "col1", "table", false);
+    col2Expr = new ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    col1Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    col2Expr = new ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    col1Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    col2Expr = new ExprNodeColumnDesc(String.class, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
   }
 
   @Test
@@ -908,6 +1011,7 @@ public void testTimeStampUdfs() throws HiveException {
 
   @Test
   public void testBetweenFilters() throws HiveException {
+    // string tests
     ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(String.class, "col1", "table", false);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc("Alpha");
     ExprNodeConstantDesc constDesc2 = new ExprNodeConstantDesc("Bravo");
@@ -934,6 +1038,56 @@
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterStringColumnNotBetween);
 
+    // CHAR tests
+    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
+    col1Expr = new ExprNodeColumnDesc(charTypeInfo, "col1", "table", false);
+    constDesc = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10));
+    constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10));
+
+    // CHAR BETWEEN
+    udf = new GenericUDFBetween();
+    children1 = new ArrayList();
+    children1.add(new ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    children1.add(constDesc2);
+    exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
+        children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterCharColumnBetween);
+
+    // CHAR NOT BETWEEN
+    children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterCharColumnNotBetween);
+
+    // VARCHAR tests
+    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
+    col1Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    constDesc = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
+    constDesc2 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Bravo", 10));
+
+    // VARCHAR BETWEEN
+    udf = new GenericUDFBetween();
+    children1 = new ArrayList();
+    children1.add(new ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    children1.add(constDesc2);
+    exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
+        children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterVarCharColumnBetween);
+
+    // VARCHAR NOT BETWEEN
+    children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterVarCharColumnNotBetween);
+
     // long BETWEEN
     children1.set(0, new ExprNodeConstantDesc(new Boolean(false)));
     children1.set(1, new ExprNodeColumnDesc(Long.class, "col1", "table", false));
@@ -1173,12 +1327,12 @@ public void testIfConditionalExprs() throws HiveException {
     children1.set(1, col2Expr);
     children1.set(2, col3Expr);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprStringColumnStringColumn);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
 
     // column/scalar
     children1.set(2, constDesc3);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprStringColumnStringScalar);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringScalar);
 
     // scalar/scalar
     children1.set(1, constDesc2);
@@ -1188,7 +1342,62 @@
     // scalar/column
     children1.set(2, col3Expr);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprStringScalarStringColumn);
-  }
+    assertTrue(ve instanceof IfExprStringScalarStringGroupColumn);
 
+    // test for CHAR type
+    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
+    constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10));
+    constDesc3 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10));
+    col2Expr = new ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+    col3Expr = new ExprNodeColumnDesc(charTypeInfo, "col3", "table", false);
+
+    // column/column
+    children1.set(1, col2Expr);
+    children1.set(2, col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
+
+    // column/scalar
+    children1.set(2, constDesc3);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnCharScalar);
+
+    // scalar/scalar
+    children1.set(1, constDesc2);
+//    ve = vc.getVectorExpression(exprDesc);
+//    assertTrue(ve instanceof IfExprCharScalarCharScalar);
+
+    // scalar/column
+    children1.set(2, col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprCharScalarStringGroupColumn);
+
+    // test for VARCHAR type
+    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
+    constDesc2 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
+    constDesc3 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Bravo", 10));
+    col2Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col2", "table", false);
+    col3Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col3", "table", false);
+
+    // column/column
+    children1.set(1, col2Expr);
+    children1.set(2, col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
+
+    // column/scalar
+    children1.set(2, constDesc3);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnVarCharScalar);
+
+    // scalar/scalar
+    children1.set(1, constDesc2);
+//    ve = vc.getVectorExpression(exprDesc);
+//    assertTrue(ve instanceof IfExprVarCharScalarVarCharScalar);
+
+    // scalar/column
+    children1.set(2, col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprVarCharScalarStringGroupColumn);
+  }
 }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java
index fccac66..a711b55 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java
@@ -32,10 +32,10 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
 
 import org.junit.Test;
 
@@ -388,7 +388,7 @@ public void testDoubleColumnScalarIfExpr() {
   @Test
   public void testIfExprStringColumnStringColumn() {
     VectorizedRowBatch batch = getBatch1Long3BytesVectors();
-    VectorExpression expr = new IfExprStringColumnStringColumn(0, 1, 2, 3);
+    VectorExpression expr = new IfExprStringGroupColumnStringGroupColumn(0, 1, 2, 3);
     BytesColumnVector r = (BytesColumnVector) batch.cols[3];
     expr.evaluate(batch);
     assertTrue(getString(r, 0).equals("arg3_0"));
@@ -474,7 +474,7 @@ public void testIfExprStringColumnStringColumn() {
   public void testIfExprStringColumnStringScalar() {
     VectorizedRowBatch batch = getBatch1Long3BytesVectors();
     byte[] scalar = getUTF8Bytes("scalar");
-    VectorExpression expr = new IfExprStringColumnStringScalar(0, 1, scalar, 3);
+    VectorExpression expr = new IfExprStringGroupColumnStringScalar(0, 1, scalar, 3);
     BytesColumnVector r = (BytesColumnVector) batch.cols[3];
     expr.evaluate(batch);
     assertTrue(getString(r, 0).equals("scalar"));
@@ -498,7 +498,7 @@ public void testIfExprStringColumnStringScalar() {
   public void testIfExprStringScalarStringColumn() {
     VectorizedRowBatch batch = getBatch1Long3BytesVectors();
     byte[] scalar = getUTF8Bytes("scalar");
-    VectorExpression expr = new IfExprStringScalarStringColumn(0,scalar, 2, 3);
+    VectorExpression expr = new IfExprStringScalarStringGroupColumn(0,scalar, 2, 3);
     BytesColumnVector r = (BytesColumnVector) batch.cols[3];
     expr.evaluate(batch);
     assertTrue(getString(r, 0).equals("arg3_0"));
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
index dd9ab1c..74242f5 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
@@ -26,16 +26,16 @@
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColEqualStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterEqualStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColLessStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColLessStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarEqualStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarGreaterStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarLessEqualStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarEqualStringColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColEqualStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColGreaterEqualStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColLessStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColLessStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarEqualStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarGreaterStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringScalarLessEqualStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringGroupColEqualStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringGroupColLessStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarEqualStringGroupColumn;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.io.Text;
 import org.junit.Test;
@@ -107,6 +107,84 @@ static void addMultiByteChars(byte[] b) {
     b[i++] = (byte) 0xA2;
   }
 
+  //-------------------------------------------------------------
+
+  // total characters = 2; byte length = 3
+  static void addMultiByteCharLeftPadded1_1(byte[] b) {
+    int i = 0;
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+    b[i++] = (byte) 0xD0;  // Cyrillic Capital DJE U+402 (2 bytes)
+    b[i++] = (byte) 0x82;
+  }
+
+  // total characters = 3; byte length = 9
+  static void addMultiByteCharLeftPadded1_2(byte[] b) {
+    int i = 0;
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+    b[i++] = (byte) 0xF0;  // Smiling Face with Open Mouth and Smiling Eyes U+1F604 (4 bytes)
+    b[i++] = (byte) 0x9F;
+    b[i++] = (byte) 0x98;
+    b[i++] = (byte) 0x84;
+    b[i++] = (byte) 0xF0;  // Grimacing Face U+1F62C (4 bytes)
+    b[i++] = (byte) 0x9F;
+    b[i++] = (byte) 0x98;
+    b[i++] = (byte) 0xAC;
+  }
+
+  // total characters = 4; byte length = 6
+  static void addMultiByteCharLeftPadded3_1(byte[] b) {
+    int i = 0;
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+    b[i++] = (byte) 0xE4;  // Asian character U+4824 (3 bytes)
+    b[i++] = (byte) 0xA0;
+    b[i++] = (byte) 0xA4;
+  }
+
+  //-------------------------------------------------------------
+
+  // total characters = 2; byte length = 4
+  static void addMultiByteCharRightPadded1_1(byte[] b) {
+    int i = 0;
+    b[i++] = (byte) 0xE0;  // Tamil Om U+0BD0 (3 bytes)
+    b[i++] = (byte) 0xAF;
+    b[i++] = (byte) 0x90;
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+  }
+
+  // total characters = 3; byte length = 5
+  static void addMultiByteCharRightPadded1_2(byte[] b) {
+    int i = 0;
+    b[i++] = (byte) 0xEA;  // Va Syllable MEE U+A521 (3 bytes)
+    b[i++] = (byte) 0x94;
+    b[i++] = (byte) 0xA1;
+    b[i++] = (byte) 0x5A;  // Latin Capital Letter Z U+005A (1 byte)
+    b[i++] = (byte) 0x20;  // blank " " (1 byte)
+  }
+
+  @Test
+  // Test basic rightTrim routine.
+  public void testRightTrim() {
+    byte[] multiByte = new byte[100];
+    int resultLen;
+
+    addMultiByteCharRightPadded1_1(multiByte);
+    resultLen = StringExpr.rightTrim(multiByte, 0, 4);
+    Assert.assertTrue(resultLen == 3);
+
+    addMultiByteCharRightPadded1_2(multiByte);
+    resultLen = StringExpr.rightTrim(multiByte, 0, 5);
+    Assert.assertTrue(resultLen == 4);
+
+    addMultiByteCharRightPadded1_1(multiByte);
+    resultLen = StringExpr.rightTrim(multiByte, 3, 1);
+    Assert.assertTrue(resultLen == 0);
+
+    addMultiByteCharRightPadded1_2(multiByte);
+    resultLen = StringExpr.rightTrim(multiByte, 3, 2);
+    Assert.assertTrue(resultLen == 1);
+  }
+
   @Test
   // Load a BytesColumnVector by copying in large data, enough to force
   // the buffer to expand.
@@ -157,7 +235,7 @@ public void testLoadBytesColumnVectorByRef() {
   public void testStringColCompareStringScalarFilter() {
     VectorizedRowBatch batch = makeStringBatch();
     VectorExpression expr;
-    expr = new FilterStringColEqualStringScalar(0, red2);
+    expr = new FilterStringGroupColEqualStringScalar(0, red2);
     expr.evaluate(batch);
 
     // only red qualifies, and it's in entry 0
@@ -165,7 +243,7 @@
     Assert.assertTrue(batch.selected[0] == 0);
 
     batch = makeStringBatch();
-    expr = new FilterStringColLessStringScalar(0, red2);
+    expr = new FilterStringGroupColLessStringScalar(0, red2);
     expr.evaluate(batch);
 
     // only green qualifies, and it's in entry 1
@@ -173,7 +251,7 @@
     Assert.assertTrue(batch.selected[0] == 1);
 
     batch = makeStringBatch();
-    expr = new FilterStringColGreaterEqualStringScalar(0, green);
+    expr = new FilterStringGroupColGreaterEqualStringScalar(0, green);
     expr.evaluate(batch);
 
     // green and red qualify
@@ -187,7 +265,7 @@
   public void testStringColCompareStringScalarProjection() {
     VectorizedRowBatch batch = makeStringBatch();
     VectorExpression expr;
-    expr = new StringColEqualStringScalar(0, red2, 2);
+    expr = new StringGroupColEqualStringScalar(0, red2, 2);
     expr.evaluate(batch);
     Assert.assertEquals(3, batch.size);
     LongColumnVector outVector = (LongColumnVector) batch.cols[2];
@@ -196,7 +274,7 @@
     Assert.assertEquals(0, outVector.vector[2]);
 
     batch = makeStringBatch();
-    expr = new StringColEqualStringScalar(0, green, 2);
+    expr = new StringGroupColEqualStringScalar(0, green, 2);
     expr.evaluate(batch);
     Assert.assertEquals(3, batch.size);
     outVector = (LongColumnVector) batch.cols[2];
@@ -210,7 +288,7 @@
   public void testStringScalarCompareStringCol() {
     VectorizedRowBatch batch = makeStringBatch();
     VectorExpression expr;
-    expr = new FilterStringScalarEqualStringColumn(red2, 0);
+    expr = new FilterStringScalarEqualStringGroupColumn(red2, 0);
     expr.evaluate(batch);
 
     // only red qualifies, and it's in entry 0
@@ -218,7 +296,7 @@
     Assert.assertTrue(batch.selected[0] == 0);
 
     batch = makeStringBatch();
-    expr = new FilterStringScalarGreaterStringColumn(red2, 0);
+    expr = new FilterStringScalarGreaterStringGroupColumn(red2, 0);
     expr.evaluate(batch);
 
     // only green qualifies, and it's in entry 1
@@ -226,7 +304,7 @@
     Assert.assertTrue(batch.selected[0] == 1);
 
     batch = makeStringBatch();
-    expr = new FilterStringScalarLessEqualStringColumn(green, 0);
+    expr = new FilterStringScalarLessEqualStringGroupColumn(green, 0);
     expr.evaluate(batch);
 
     // green and red qualify
@@ -240,7 +318,7 @@
   public void testStringScalarCompareStringColProjection() {
     VectorizedRowBatch batch = makeStringBatch();
     VectorExpression expr;
-    expr = new StringScalarEqualStringColumn(red2, 0, 2);
+    expr = new StringScalarEqualStringGroupColumn(red2, 0, 2);
     expr.evaluate(batch);
     Assert.assertEquals(3, batch.size);
     LongColumnVector outVector = (LongColumnVector) batch.cols[2];
@@ -249,7 +327,7 @@
     Assert.assertEquals(0, outVector.vector[2]);
 
     batch = makeStringBatch();
-    expr = new StringScalarEqualStringColumn(green, 0, 2);
+    expr = new StringScalarEqualStringGroupColumn(green, 0, 2);
     expr.evaluate(batch);
     Assert.assertEquals(3, batch.size);
     outVector = (LongColumnVector) batch.cols[2];
@@ -275,7 +353,7 @@ public void testStringColCompareStringColFilter() {
 
     // nulls possible on left, right
     batch = makeStringBatchForColColCompare();
-    expr = new FilterStringColLessStringColumn(0,1);
+    expr = new FilterStringGroupColLessStringGroupColumn(0,1);
     expr.evaluate(batch);
     Assert.assertEquals(1, batch.size);
     Assert.assertEquals(0, batch.selected[0]);
@@ -428,7 +506,7 @@ public void testStringColCompareStringColProjection() {
 
     // nulls possible on left, right
     batch = makeStringBatchForColColCompare();
-    expr = new StringColLessStringColumn(0, 1, 3);
+    expr = new StringGroupColLessStringGroupColumn(0, 1, 3);
    expr.evaluate(batch);
     Assert.assertEquals(4, batch.size);
     outVector = ((LongColumnVector) batch.cols[3]).vector;
diff --git ql/src/test/queries/clientpositive/vector_char_2.q ql/src/test/queries/clientpositive/vector_char_2.q
new file mode 100644
index 0000000..0828ca1
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_char_2.q
@@ -0,0 +1,49 @@
+SET hive.vectorized.execution.enabled=true;
+drop table char_2;
+
+create table char_2 (
+  key char(10),
+  value char(20)
+) stored as orc;
+
+insert overwrite table char_2 select * from src;
+
+select value, sum(cast(key as int)), count(*) numrows
+from src
+group by value
+order by value asc
+limit 5;
+
+explain select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value asc
+limit 5;
+
+-- should match the query from src
+select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value asc
+limit 5;
+
+select value, sum(cast(key as int)), count(*) numrows
+from src
+group by value
+order by value desc
+limit 5;
+
+explain select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value desc
+limit 5;
+
+-- should match the query from src
+select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value desc
+limit 5;
+
+drop table char_2;
diff --git ql/src/test/queries/clientpositive/vector_char_simple.q ql/src/test/queries/clientpositive/vector_char_simple.q
new file mode 100644
index 0000000..ec46630
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_char_simple.q
@@ -0,0 +1,43 @@
+SET hive.vectorized.execution.enabled=true;
+drop table char_2;
+
+create table char_2 (
+  key char(10),
+  value char(20)
+) stored as orc;
+
+insert overwrite table char_2 select * from src;
+
+select key, value
+from src
+order by key asc
+limit 5;
+
+explain select key, value
+from char_2
+order by key asc
+limit 5;
+
+-- should match the query from src
+select key, value
+from char_2
+order by key asc
+limit 5;
+
+select key, value
+from src
+order by key desc
+limit 5;
+
+explain select key, value
+from char_2
+order by key desc
+limit 5;
+
+-- should match the query from src
+select key, value
+from char_2
+order by key desc
+limit 5;
+
+drop table char_2;
diff --git ql/src/test/queries/clientpositive/vector_string_temp.q ql/src/test/queries/clientpositive/vector_string_temp.q
new file mode 100644
index 0000000..0ac71ca
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_string_temp.q
@@ -0,0 +1,49 @@
+SET hive.vectorized.execution.enabled=true;
+drop table char_2;
+
+create table char_2 (
+  key string,
+  value string
+) stored as orc;
+
+insert overwrite table char_2 select * from src;
+
+select value, sum(cast(key as int)), count(*) numrows
+from src
+group by value
+order by value asc
+limit 5;
+
+explain select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value asc
+limit 5;
+
+-- should match the query from src
+select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value asc
+limit 5;
+
+select value, sum(cast(key as int)), count(*) numrows
+from src
+group by value
+order by value desc
+limit 5;
+
+explain select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value desc
+limit 5;
+
+-- should match the query from src
+select value, sum(cast(key as int)), count(*) numrows
+from char_2
+group by value
+order by value desc
+limit 5;
+
+drop table char_2;
diff --git ql/src/test/queries/clientpositive/vector_varchar_simple.q ql/src/test/queries/clientpositive/vector_varchar_simple.q
new file mode 100644
index 0000000..68d6b09
--- /dev/null
+++ ql/src/test/queries/clientpositive/vector_varchar_simple.q
@@ -0,0 +1,43 @@
+SET hive.vectorized.execution.enabled=true;
+drop table char_2;
+
+create table char_2 (
+  key varchar(10),
+  value varchar(20)
+) stored as orc;
+
+insert overwrite table char_2 select * from src;
+
+select key, value
+from src
+order by key asc
+limit 5;
+
+explain select key, value
+from char_2
+order by key asc
+limit 5;
+
+-- should match the query from src
+select key, value
+from char_2
+order by key asc
+limit 5;
+
+select key, value
+from src
+order by key desc
+limit 5;
+
+explain select key, value
+from char_2
+order by key desc
+limit 5;
+
+-- should match the query from src
+select key, value
+from char_2
+order by key desc
+limit 5;
+
+drop table char_2;
diff --git ql/src/test/results/clientpositive/alter_merge_orc.q.out ql/src/test/results/clientpositive/alter_merge_orc.q.out
index f26e38c..23c8bc8 100644
--- ql/src/test/results/clientpositive/alter_merge_orc.q.out
+++ ql/src/test/results/clientpositive/alter_merge_orc.q.out
@@ -48,9 +48,9 @@
 columns:struct columns { i32 key, string value}
 partitioned:false
 partitionColumns:
 totalNumberFiles:3
-totalFileSize:7380
-maxFileSize:2460
-minFileSize:2460
+totalFileSize:6525
+maxFileSize:2175
+minFileSize:2175
 #### A masked pattern was here ####
 
 PREHOOK: query: select count(1) from src_orc_merge_test
@@ -91,9 +91,9 @@
 columns:struct columns { i32 key, string value}
 partitioned:false
 partitionColumns:
 totalNumberFiles:1
-totalFileSize:7059
-maxFileSize:7059
-minFileSize:7059
+totalFileSize:6204
+maxFileSize:6204
+minFileSize:6204
 #### A masked pattern was here ####
 
 PREHOOK: query: select count(1) from src_orc_merge_test
@@ -171,9 +171,9 @@
 columns:struct columns { i32 key, string value}
 partitioned:true
 partitionColumns:struct partition_columns { string ds}
 totalNumberFiles:3
-totalFileSize:7380
-maxFileSize:2460
-minFileSize:2460
+totalFileSize:6525
+maxFileSize:2175
+minFileSize:2175
 #### A masked pattern was here ####
 
 PREHOOK: query: select count(1) from src_orc_merge_test_part
@@ -218,9 +218,9 @@
 columns:struct columns { i32 key, string value}
 partitioned:true
 partitionColumns:struct partition_columns { string ds}
 totalNumberFiles:1
-totalFileSize:7059
-maxFileSize:7059
-minFileSize:7059
+totalFileSize:6204
+maxFileSize:6204
+minFileSize:6204
 #### A masked pattern was here ####
 
 PREHOOK: query: select count(1) from src_orc_merge_test_part
diff --git ql/src/test/results/clientpositive/alter_merge_stats_orc.q.out ql/src/test/results/clientpositive/alter_merge_stats_orc.q.out
index 64e51d5..cfce2ee 100644
--- ql/src/test/results/clientpositive/alter_merge_stats_orc.q.out
+++ ql/src/test/results/clientpositive/alter_merge_stats_orc.q.out
@@ -48,9 +48,9 @@
 columns:struct columns { i32 key, string value}
 partitioned:false
 partitionColumns:
 totalNumberFiles:3
-totalFileSize:7380
-maxFileSize:2460
-minFileSize:2460
+totalFileSize:6525
+maxFileSize:2175
+minFileSize:2175
 #### A masked pattern was here ####
 
 PREHOOK: query: desc extended src_orc_merge_test_stat
@@ -92,7 +92,7 @@ Table Parameters:
 	numFiles            	3
 	numRows             	1500
 	rawDataSize         	141000
-	totalSize           	7380
+	totalSize           	6525
 #### A masked pattern was here ####
 
 # Storage Information
@@ -142,7 +142,7 @@ Table Parameters:
 	numFiles            	1
 	numRows             	1500
 	rawDataSize         	141000
-	totalSize           	7059
+	totalSize           	6204
 #### A masked pattern was here ####
 
 # Storage Information
@@ -212,9 +212,9 @@
 columns:struct columns { i32 key, string value}
 partitioned:true
 partitionColumns:struct partition_columns { string ds}
 totalNumberFiles:3
-totalFileSize:7380
-maxFileSize:2460
-minFileSize:2460
+totalFileSize:6525
+maxFileSize:2175
+minFileSize:2175
 #### A masked pattern was here ####
 
 PREHOOK: query: desc formatted src_orc_merge_test_part_stat partition (ds='2011')
@@ -245,7 +245,7 @@ Partition Parameters:
 	numFiles            	3
 	numRows             	500
 	rawDataSize         	47000
-	totalSize           	7380
+	totalSize           	6525
 #### A masked pattern was here ####
 
 # Storage Information
@@ -294,7 +294,7 @@ Partition Parameters:
 	numFiles            	3
 	numRows             	1500
 	rawDataSize         	141000
-	totalSize           	7380
+	totalSize           	6525
 #### A masked pattern was here ####
 
 # Storage Information
@@ -351,7 +351,7 @@ Partition Parameters:
 	numFiles            	1
 	numRows             	1500
 	rawDataSize         	141000
-	totalSize           	7059
+	totalSize           	6204
 #### A masked pattern was here ####
 
 # Storage Information
diff --git ql/src/test/results/clientpositive/annotate_stats_part.q.out ql/src/test/results/clientpositive/annotate_stats_part.q.out
index 6262d37..1fc0420 100644
--- ql/src/test/results/clientpositive/annotate_stats_part.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_part.q.out
@@ -98,11 +98,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 5 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 5 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
            outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 5 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 5 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- partition level analyze statistics for specific parition
@@ -158,11 +158,11 @@ STAGE PLANS:
      Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 9 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 9 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 9 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 9 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- basicStatState: COMPLETE colStatState: NONE
@@ -181,11 +181,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 7 Data size: 399 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 387 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 7 Data size: 399 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 7 Data size: 387 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- partition level analyze statistics for all partitions
@@ -245,11 +245,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- basicStatState: COMPLETE colStatState: NONE
@@ -268,11 +268,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- both partitions will be pruned
@@ -331,14 +331,14 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: zip (type: bigint)
               outputColumnNames: _col0
-              Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                 compressed: false
-                Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -366,7 +366,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: PARTIAL
             Select Operator
               expressions: state (type: string)
               outputColumnNames: _col0
@@ -403,7 +403,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: PARTIAL
             Select Operator
               expressions: state (type: string), locid (type: int)
               outputColumnNames: _col0, _col1
@@ -438,7 +438,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 7 Data size: 399 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 7 Data size: 387 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
               expressions: state (type: string), locid (type: int)
               outputColumnNames: _col0, _col1
@@ -508,11 +508,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: loc_orc
-          Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
            expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 8 Data size: 724 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 8 Data size: 712 Basic stats: COMPLETE Column stats: PARTIAL
           ListSink
 
PREHOOK: query: -- This is to test filter expression evaluation on partition column
@@ -533,7 +533,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 7 Data size: 399 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 7 Data size: 387 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
               predicate: (locid > 0) (type: boolean)
               Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
@@ -569,7 +569,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 7 Data size: 399 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 7 Data size: 387 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
               predicate: (locid > 0) (type: boolean)
               Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
@@ -605,7 +605,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: loc_orc
-            Statistics: Num rows: 7 Data size: 399 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 7 Data size: 387 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
               predicate: (locid > 0) (type: boolean)
               Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
diff --git ql/src/test/results/clientpositive/annotate_stats_table.q.out ql/src/test/results/clientpositive/annotate_stats_table.q.out
index e0e7021..a19b884 100644
--- ql/src/test/results/clientpositive/annotate_stats_table.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_table.q.out
@@ -89,11 +89,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: emp_orc
-          Statistics: Num rows: 3 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 357 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: lastname (type: string), deptid (type: int)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 3 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 357 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- table level analyze statistics
@@ -122,11 +122,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: emp_orc
-          Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: lastname (type: string), deptid (type: int)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: NONE
           ListSink
 
PREHOOK: query: -- column level partial statistics
@@ -155,11 +155,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: emp_orc
-          Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
             expressions: lastname (type: string), deptid (type: int)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: PARTIAL
           ListSink
PREHOOK: query: -- all selected columns have statistics @@ -180,7 +180,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: emp_orc - Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: deptid (type: int) outputColumnNames: _col0 @@ -225,11 +225,11 @@ STAGE PLANS: Processor Tree: TableScan alias: emp_orc - Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: lastname (type: string), deptid (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: COMPLETE ListSink PREHOOK: query: -- basicStatState: COMPLETE colStatState: COMPLETE @@ -248,7 +248,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: emp_orc - Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: lastname (type: string) outputColumnNames: _col0 @@ -283,7 +283,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: emp_orc - Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: deptid (type: int) outputColumnNames: _col0 @@ -318,7 +318,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: emp_orc - Statistics: Num rows: 48 Data size: 364 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 48 Data size: 357 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: lastname (type: string), deptid (type: int) outputColumnNames: _col0, _col1 diff --git ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out index ef63e74..da2fa16 100644 --- ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out +++ ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out @@ -215,7 +215,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 364 + totalSize 351 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -383,7 +383,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 364 + totalSize 351 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out index cbe210b..a99b5c1 100644 --- ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out +++ ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out @@ -232,7 +232,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 364 + totalSize 351 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -322,7 +322,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 384 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -492,7 +492,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 364 + totalSize 351 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -582,7 +582,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 384 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -765,7 +765,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 364 + totalSize 351 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -855,7 +855,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 384 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1021,7 +1021,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 364 + totalSize 351 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1111,7 +1111,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 390 + totalSize 384 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1396,7 +1396,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 280 + totalSize 266 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1580,7 +1580,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 278 + totalSize 264 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1764,7 +1764,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 271 + totalSize 259 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -1985,7 +1985,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 280 + totalSize 266 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -2169,7 +2169,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde 
- totalSize 278 + totalSize 264 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde @@ -2353,7 +2353,7 @@ STAGE PLANS: serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde - totalSize 271 + totalSize 259 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde diff --git ql/src/test/results/clientpositive/orc_analyze.q.out ql/src/test/results/clientpositive/orc_analyze.q.out index 979d2a7..df96d60 100644 --- ql/src/test/results/clientpositive/orc_analyze.q.out +++ ql/src/test/results/clientpositive/orc_analyze.q.out @@ -105,7 +105,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3042 + totalSize 3034 #### A masked pattern was here #### # Storage Information @@ -195,7 +195,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3042 + totalSize 3034 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/orc_merge2.q.out ql/src/test/results/clientpositive/orc_merge2.q.out index 55b1de3..a8dfbea 100644 --- ql/src/test/results/clientpositive/orc_merge2.q.out +++ ql/src/test/results/clientpositive/orc_merge2.q.out @@ -178,7 +178,7 @@ Partition Parameters: numFiles 1 numRows 4 rawDataSize 376 - totalSize 320 + totalSize 309 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/orc_merge3.q.out ql/src/test/results/clientpositive/orc_merge3.q.out index 258f538..31ceda7 100644 --- ql/src/test/results/clientpositive/orc_merge3.q.out +++ ql/src/test/results/clientpositive/orc_merge3.q.out @@ -161,7 +161,7 @@ Table Parameters: numFiles 1 numRows 1000 rawDataSize 94000 - totalSize 4762 + totalSize 4192 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/orc_merge4.q.out ql/src/test/results/clientpositive/orc_merge4.q.out index 3a2bb88..f16ce08 100644 --- ql/src/test/results/clientpositive/orc_merge4.q.out +++ ql/src/test/results/clientpositive/orc_merge4.q.out @@ -64,7 +64,7 @@ Partition Parameters: numFiles 1 numRows 500 rawDataSize 47000 - totalSize 2460 + totalSize 2175 #### A masked pattern was here #### # Storage Information @@ -129,7 +129,7 @@ Partition Parameters: numFiles 1 numRows 500 rawDataSize 47000 - totalSize 2460 + totalSize 2175 #### A masked pattern was here #### # Storage Information @@ -170,7 +170,7 @@ Partition Parameters: numFiles 1 numRows 500 rawDataSize 47000 - totalSize 2460 + totalSize 2175 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/orc_merge_incompat1.q.out ql/src/test/results/clientpositive/orc_merge_incompat1.q.out index 81167bb..db74a89 100644 --- ql/src/test/results/clientpositive/orc_merge_incompat1.q.out +++ ql/src/test/results/clientpositive/orc_merge_incompat1.q.out @@ -172,7 +172,7 @@ Table Parameters: numFiles 5 numRows 15 rawDataSize 3825 - totalSize 2739 + totalSize 2717 #### A masked pattern was here #### # Storage Information @@ -250,7 +250,7 @@ Table Parameters: numFiles 3 numRows 15 rawDataSize 3825 - totalSize 2235 + totalSize 2213 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/tez/alter_merge_orc.q.out ql/src/test/results/clientpositive/tez/alter_merge_orc.q.out index f26e38c..23c8bc8 100644 --- ql/src/test/results/clientpositive/tez/alter_merge_orc.q.out +++ 
ql/src/test/results/clientpositive/tez/alter_merge_orc.q.out @@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value} partitioned:false partitionColumns: totalNumberFiles:3 -totalFileSize:7380 -maxFileSize:2460 -minFileSize:2460 +totalFileSize:6525 +maxFileSize:2175 +minFileSize:2175 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test @@ -91,9 +91,9 @@ columns:struct columns { i32 key, string value} partitioned:false partitionColumns: totalNumberFiles:1 -totalFileSize:7059 -maxFileSize:7059 -minFileSize:7059 +totalFileSize:6204 +maxFileSize:6204 +minFileSize:6204 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test @@ -171,9 +171,9 @@ columns:struct columns { i32 key, string value} partitioned:true partitionColumns:struct partition_columns { string ds} totalNumberFiles:3 -totalFileSize:7380 -maxFileSize:2460 -minFileSize:2460 +totalFileSize:6525 +maxFileSize:2175 +minFileSize:2175 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test_part @@ -218,9 +218,9 @@ columns:struct columns { i32 key, string value} partitioned:true partitionColumns:struct partition_columns { string ds} totalNumberFiles:1 -totalFileSize:7059 -maxFileSize:7059 -minFileSize:7059 +totalFileSize:6204 +maxFileSize:6204 +minFileSize:6204 #### A masked pattern was here #### PREHOOK: query: select count(1) from src_orc_merge_test_part diff --git ql/src/test/results/clientpositive/tez/alter_merge_stats_orc.q.out ql/src/test/results/clientpositive/tez/alter_merge_stats_orc.q.out index 64e51d5..cfce2ee 100644 --- ql/src/test/results/clientpositive/tez/alter_merge_stats_orc.q.out +++ ql/src/test/results/clientpositive/tez/alter_merge_stats_orc.q.out @@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value} partitioned:false partitionColumns: totalNumberFiles:3 -totalFileSize:7380 -maxFileSize:2460 -minFileSize:2460 +totalFileSize:6525 +maxFileSize:2175 +minFileSize:2175 #### A masked pattern was here #### PREHOOK: query: desc extended src_orc_merge_test_stat @@ -92,7 +92,7 @@ Table Parameters: numFiles 3 numRows 1500 rawDataSize 141000 - totalSize 7380 + totalSize 6525 #### A masked pattern was here #### # Storage Information @@ -142,7 +142,7 @@ Table Parameters: numFiles 1 numRows 1500 rawDataSize 141000 - totalSize 7059 + totalSize 6204 #### A masked pattern was here #### # Storage Information @@ -212,9 +212,9 @@ columns:struct columns { i32 key, string value} partitioned:true partitionColumns:struct partition_columns { string ds} totalNumberFiles:3 -totalFileSize:7380 -maxFileSize:2460 -minFileSize:2460 +totalFileSize:6525 +maxFileSize:2175 +minFileSize:2175 #### A masked pattern was here #### PREHOOK: query: desc formatted src_orc_merge_test_part_stat partition (ds='2011') @@ -245,7 +245,7 @@ Partition Parameters: numFiles 3 numRows 500 rawDataSize 47000 - totalSize 7380 + totalSize 6525 #### A masked pattern was here #### # Storage Information @@ -294,7 +294,7 @@ Partition Parameters: numFiles 3 numRows 1500 rawDataSize 141000 - totalSize 7380 + totalSize 6525 #### A masked pattern was here #### # Storage Information @@ -351,7 +351,7 @@ Partition Parameters: numFiles 1 numRows 1500 rawDataSize 141000 - totalSize 7059 + totalSize 6204 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/tez/orc_analyze.q.out ql/src/test/results/clientpositive/tez/orc_analyze.q.out index 979d2a7..df96d60 100644 --- 
ql/src/test/results/clientpositive/tez/orc_analyze.q.out +++ ql/src/test/results/clientpositive/tez/orc_analyze.q.out @@ -105,7 +105,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3042 + totalSize 3034 #### A masked pattern was here #### # Storage Information @@ -195,7 +195,7 @@ Table Parameters: numFiles 1 numRows 100 rawDataSize 52600 - totalSize 3042 + totalSize 3034 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/tez/orc_merge2.q.out ql/src/test/results/clientpositive/tez/orc_merge2.q.out index f0d92e8..8656c96 100644 --- ql/src/test/results/clientpositive/tez/orc_merge2.q.out +++ ql/src/test/results/clientpositive/tez/orc_merge2.q.out @@ -188,7 +188,7 @@ Partition Parameters: numFiles 1 numRows 4 rawDataSize 376 - totalSize 320 + totalSize 309 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/tez/orc_merge3.q.out ql/src/test/results/clientpositive/tez/orc_merge3.q.out index 534bb0a..5603a26 100644 --- ql/src/test/results/clientpositive/tez/orc_merge3.q.out +++ ql/src/test/results/clientpositive/tez/orc_merge3.q.out @@ -138,7 +138,7 @@ Table Parameters: numFiles 1 numRows 1000 rawDataSize 94000 - totalSize 2502 + totalSize 2263 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/tez/orc_merge4.q.out ql/src/test/results/clientpositive/tez/orc_merge4.q.out index 96c8089..01f9fa8 100644 --- ql/src/test/results/clientpositive/tez/orc_merge4.q.out +++ ql/src/test/results/clientpositive/tez/orc_merge4.q.out @@ -64,7 +64,7 @@ Partition Parameters: numFiles 1 numRows 500 rawDataSize 47000 - totalSize 2460 + totalSize 2175 #### A masked pattern was here #### # Storage Information @@ -129,7 +129,7 @@ Partition Parameters: numFiles 1 numRows 500 rawDataSize 47000 - totalSize 2460 + totalSize 2175 #### A masked pattern was here #### # Storage Information @@ -170,7 +170,7 @@ Partition Parameters: numFiles 1 numRows 500 rawDataSize 47000 - totalSize 2460 + totalSize 2175 #### A masked pattern was here #### # Storage Information diff --git ql/src/test/results/clientpositive/tez/vector_char_2.q.out ql/src/test/results/clientpositive/tez/vector_char_2.q.out new file mode 100644 index 0000000..2e66485 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_char_2.q.out @@ -0,0 +1,298 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 
+PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: char(20)), key (type: char(10)) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: char(20)) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: + + Map-reduce partition columns: _col0 (type: char(20)) + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: char(20)) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: char(20)), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: + + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reducer 3 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(20)), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group 
by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: char(20)), key (type: char(10)) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: char(20)) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: + + Map-reduce partition columns: _col0 (type: char(20)) + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: char(20)) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: char(20)), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: - + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reducer 3 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(20)), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: 
COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/tez/vector_char_simple.q.out ql/src/test/results/clientpositive/tez/vector_char_simple.q.out new file mode 100644 index 0000000..bac33ec --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_char_simple.q.out @@ -0,0 +1,236 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key, value +from src +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + 
Select Operator + expressions: key (type: char(10)), value (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(10)) + sort order: + + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: char(20)) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(10)), VALUE._col0 (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: select key, value +from src +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: char(10)), value (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(10)) + sort order: - + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: char(20)) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(10)), VALUE._col0 (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: 
COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/tez/vector_string_temp.q.out ql/src/test/results/clientpositive/tez/vector_string_temp.q.out new file mode 100644 index 0000000..39654a3 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_string_temp.q.out @@ -0,0 +1,298 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key string, + value string +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key string, + value string +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: 
NONE + Select Operator + expressions: value (type: string), key (type: string) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reducer 3 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 
+PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) + Reducer 3 <- Reducer 2 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string), key (type: string) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: - + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reducer 3 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE 
+PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/tez/vector_varchar_simple.q.out ql/src/test/results/clientpositive/tez/vector_varchar_simple.q.out new file mode 100644 index 0000000..f097414 --- /dev/null +++ ql/src/test/results/clientpositive/tez/vector_varchar_simple.q.out @@ -0,0 +1,236 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key varchar(10), + value varchar(20) +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key varchar(10), + value varchar(20) +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key, value +from src +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: varchar(10)), value (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: varchar(10)) + sort order: + + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: varchar(20)) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: varchar(10)), VALUE._col0 (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: select key, value +from src +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Tez + Edges: + Reducer 2 <- Map 1 (SIMPLE_EDGE) +#### A masked pattern was here #### + Vertices: + Map 1 + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: varchar(10)), value (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: varchar(10)) + sort order: - + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: varchar(20)) + Execution mode: vectorized + Reducer 2 + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: varchar(10)), VALUE._col0 (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + Execution mode: vectorized + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 
diff --git ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
index 88f9e5f..c307141 100644
--- ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
+++ ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
@@ -264,7 +264,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -284,7 +284,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -554,7 +554,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -574,7 +574,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -621,7 +621,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -641,7 +641,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -870,7 +870,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -890,7 +890,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1127,7 +1127,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1147,7 +1147,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1415,7 +1415,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1435,7 +1435,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1713,7 +1713,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1733,7 +1733,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1979,7 +1979,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1999,7 +1999,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2043,7 +2043,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2063,7 +2063,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2292,7 +2292,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2312,7 +2312,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2356,7 +2356,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2376,7 +2376,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2613,7 +2613,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2633,7 +2633,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2890,7 +2890,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2910,7 +2910,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -3169,7 +3169,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -3189,7 +3189,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -3458,7 +3458,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -3478,7 +3478,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -3784,7 +3784,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -3804,7 +3804,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4173,7 +4173,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4193,7 +4193,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4237,7 +4237,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4257,7 +4257,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4496,7 +4496,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4516,7 +4516,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4774,7 +4774,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4794,7 +4794,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -5238,7 +5238,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -5258,7 +5258,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -5746,7 +5746,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -5766,7 +5766,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -6103,7 +6103,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -6123,7 +6123,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -6456,7 +6456,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -6476,7 +6476,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -6806,7 +6806,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -6826,7 +6826,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -7204,7 +7204,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -7224,7 +7224,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -7553,7 +7553,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -7573,7 +7573,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
-totalSize 2571
+totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
diff --git ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out
index 8b39f9c..a4705a9 100644
--- ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out
+++ ql/src/test/results/clientpositive/tez/vectorized_timestamp_funcs.q.out
@@ -168,46 +168,46 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-NULL NULL NULL NULL NULL NULL NULL NULL NULL
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
-28786 1969 12 31 31 1 23 59 46
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
 PREHOOK: query: EXPLAIN SELECT
 to_unix_timestamp(stimestamp1) AS c1,
 year(stimestamp1),
@@ -457,45 +457,45 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
 NULL NULL NULL NULL NULL NULL NULL NULL NULL
-false false false false false true false false false
-false true true true true true true true false
-false true true true true true true true false
-false true true true true true true true false
-false false false false false true false false false
-false false false false false true false false false
-false false false false false true false false false
-false true true true true true true true false
-false true true true true true true true false
-false true true true true true true true false
-false false false false false true false false false
-false false false false false true false false false
-false false false false false true false false false
-false true true true true true true true false
-false false false false false true false false false
-false true true true true true true true false
-false false false false false true false false false
-false false false false false true false false false
-false true true true true true true true false
-false true true true true true true true false
-false false false false false true false false false
-false false false false false true false false false
-false false false false false true false false false
-false false false false false true false false false
-false true true true true true true true false
-false true true true true true true true false
-false true true true true true true true false
-false false false false false true false false false
-false true true true true true true true false
-false false false false false true false false false
-false true true true true true true true false
-false false false false false true false false false
-false true true true true true true true false
-false true true true true true true true false
-false true true true true true true true false
-false false false false false true false false false
-false false false false false true false false false
-false true true true true true true true false
-true true true true true true true true true
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true true
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true true
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true false
 PREHOOK: query: -- Wrong format. Should all be NULL.
EXPLAIN SELECT to_unix_timestamp(stimestamp1) AS c1, diff --git ql/src/test/results/clientpositive/vector_between_in.q.out ql/src/test/results/clientpositive/vector_between_in.q.out index e7b64e2..3a70f3d 100644 --- ql/src/test/results/clientpositive/vector_between_in.q.out +++ ql/src/test/results/clientpositive/vector_between_in.q.out @@ -405,53 +405,6 @@ POSTHOOK: query: SELECT cdate FROM decimal_date_test WHERE cdate IN (CAST("1969- POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-07-14 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 -1969-10-26 PREHOOK: query: SELECT COUNT(*) FROM decimal_date_test WHERE cdate NOT IN (CAST("1969-10-26" AS DATE), CAST("1969-07-14" AS DATE), CAST("1970-01-21" AS DATE)) PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -460,7 +413,7 @@ POSTHOOK: query: SELECT COUNT(*) FROM decimal_date_test WHERE cdate NOT IN (CAST POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -5996 +6058 PREHOOK: query: SELECT cdecimal1 FROM decimal_date_test WHERE cdecimal1 IN (2365.8945945946, 881.0135135135, -3367.6517567568) ORDER BY cdecimal1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -495,23 +448,6 @@ POSTHOOK: query: SELECT cdate FROM decimal_date_test WHERE cdate BETWEEN CAST("1 POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -1969-12-30 -1969-12-30 -1969-12-30 -1969-12-30 -1969-12-30 -1969-12-30 -1969-12-30 -1969-12-30 -1969-12-31 -1969-12-31 -1969-12-31 -1969-12-31 -1969-12-31 -1969-12-31 -1969-12-31 -1969-12-31 -1969-12-31 1970-01-01 1970-01-01 1970-01-01 @@ -536,57 +472,6 @@ POSTHOOK: Input: default@decimal_date_test 1970-01-01 1970-01-01 1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-01 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 -1970-01-02 PREHOOK: query: SELECT cdate FROM decimal_date_test WHERE cdate NOT BETWEEN CAST("1968-05-01" AS DATE) AND CAST("1971-09-01" AS DATE) ORDER BY cdate PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test @@ -595,40 +480,6040 @@ POSTHOOK: query: SELECT cdate FROM decimal_date_test WHERE cdate NOT BETWEEN CAS POSTHOOK: type: QUERY POSTHOOK: Input: default@decimal_date_test #### A masked pattern was here #### -1968-04-07 -1968-04-09 -1968-04-10 -1968-04-14 -1968-04-16 -1968-04-16 -1968-04-19 -1968-04-23 -1968-04-25 -1968-04-26 -1968-04-27 -1968-04-27 -1968-04-27 -1968-04-29 -1968-04-29 -1968-04-29 -1968-04-29 -1968-04-30 -1971-09-02 
-1971-09-04 -1971-09-06 -1971-09-06 -1971-09-06 -1971-09-09 -1971-09-09 -1971-09-15 -1971-09-17 -1971-09-18 -1971-09-21 -1971-09-21 -1971-09-21 -1971-09-22 -1971-09-22 -1971-09-25 +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +9826-10-20 +7676-10-29 +0995-03-17 +0995-03-17 +0995-03-17 +1863-08-08 +1863-08-08 +1888-05-06 +1888-05-06 +1888-05-06 +1992-12-13 +2122-11-28 +2180-03-31 +2180-03-31 +2180-03-31 +8217-10-01 +9851-09-16 +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL 
[... long run of identical +NULL result rows elided; the run continues below ...]
+NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL +NULL PREHOOK: query: SELECT cdecimal1 FROM decimal_date_test WHERE cdecimal1 BETWEEN -20 AND 45.9918918919 ORDER BY cdecimal1 PREHOOK: type: QUERY PREHOOK: Input: default@decimal_date_test diff --git ql/src/test/results/clientpositive/vector_char_2.q.out ql/src/test/results/clientpositive/vector_char_2.q.out new file mode 100644 index 0000000..7d1512c --- /dev/null +++ ql/src/test/results/clientpositive/vector_char_2.q.out @@ -0,0 +1,304 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select 
Operator + expressions: value (type: char(20)), key (type: char(10)) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: char(20)) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: + + Map-reduce partition columns: _col0 (type: char(20)) + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: char(20)) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: char(20)), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: + + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(20)), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY 
+POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: char(20)), key (type: char(10)) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: char(20)) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: + + Map-reduce partition columns: _col0 (type: char(20)) + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: char(20)) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: char(20)), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: char(20)) + sort order: - + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(20)), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 49500 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY 
+PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/vector_char_simple.q.out ql/src/test/results/clientpositive/vector_char_simple.q.out new file mode 100644 index 0000000..72dc8aa --- /dev/null +++ ql/src/test/results/clientpositive/vector_char_simple.q.out @@ -0,0 +1,222 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key char(10), + value char(20) +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key, value +from src +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: char(10)), value (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(10)) + sort order: + + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: char(20)) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(10)), VALUE._col0 (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic 
stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: select key, value +from src +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: char(10)), value (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: char(10)) + sort order: - + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: char(20)) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: char(10)), VALUE._col0 (type: char(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 99000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 990 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 
+POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/vector_string_temp.q.out ql/src/test/results/clientpositive/vector_string_temp.q.out new file mode 100644 index 0000000..2fb04d7 --- /dev/null +++ ql/src/test/results/clientpositive/vector_string_temp.q.out @@ -0,0 +1,304 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +PREHOOK: query: create table char_2 ( + key string, + value string +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key string, + value string +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string), key (type: string) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Select 
Operator + expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_0 0 3 +val_10 10 1 +val_100 200 2 +val_103 206 2 +val_104 208 2 +PREHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select value, sum(cast(key as int)), count(*) numrows +from src +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 depends on stages: Stage-2 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: value (type: string), key (type: string) + outputColumnNames: value, key + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Group By Operator + aggregations: sum(UDFToInteger(key)), count() + keys: value (type: 
string) + mode: hash + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Map-reduce partition columns: _col0 (type: string) + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Execution mode: vectorized + Reduce Operator Tree: + Group By Operator + aggregations: sum(VALUE._col0), count(VALUE._col1) + keys: KEY._col0 (type: string) + mode: mergepartial + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + Stage: Stage-2 + Map Reduce + Map Operator Tree: + TableScan + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: - + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: bigint), _col2 (type: bigint) + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: bigint), VALUE._col1 (type: bigint) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select value, sum(cast(key as int)), count(*) numrows +from char_2 +group by value +order by value desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +val_98 196 2 +val_97 194 2 +val_96 96 1 +val_95 190 2 +val_92 92 1 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/vector_varchar_simple.q.out ql/src/test/results/clientpositive/vector_varchar_simple.q.out new file mode 100644 index 0000000..1c77c39 --- /dev/null +++ ql/src/test/results/clientpositive/vector_varchar_simple.q.out @@ -0,0 +1,222 @@ +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE 
+PREHOOK: query: create table char_2 ( + key varchar(10), + value varchar(20) +) stored as orc +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@char_2 +POSTHOOK: query: create table char_2 ( + key varchar(10), + value varchar(20) +) stored as orc +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@char_2 +PREHOOK: query: insert overwrite table char_2 select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@char_2 +POSTHOOK: query: insert overwrite table char_2 select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@char_2 +POSTHOOK: Lineage: char_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: char_2.value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: select key, value +from src +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: varchar(10)), value (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: varchar(10)) + sort order: + + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: varchar(20)) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: varchar(10)), VALUE._col0 (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key asc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +0 val_0 +0 val_0 +0 val_0 +10 val_10 +100 val_100 +PREHOOK: query: select key, value +from src +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@src 
+#### A masked pattern was here #### +POSTHOOK: query: select key, value +from src +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +POSTHOOK: query: explain select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: char_2 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: key (type: varchar(10)), value (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: varchar(10)) + sort order: - + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + value expressions: _col1 (type: varchar(20)) + Execution mode: vectorized + Reduce Operator Tree: + Select Operator + expressions: KEY.reducesinkkey0 (type: varchar(10)), VALUE._col0 (type: varchar(20)) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: 5 + Processor Tree: + ListSink + +PREHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +PREHOOK: type: QUERY +PREHOOK: Input: default@char_2 +#### A masked pattern was here #### +POSTHOOK: query: -- should match the query from src +select key, value +from char_2 +order by key desc +limit 5 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@char_2 +#### A masked pattern was here #### +98 val_98 +98 val_98 +97 val_97 +97 val_97 +96 val_96 +PREHOOK: query: drop table char_2 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@char_2 +PREHOOK: Output: default@char_2 +POSTHOOK: query: drop table char_2 +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@char_2 +POSTHOOK: Output: default@char_2 diff --git ql/src/test/results/clientpositive/vectorized_casts.q.out ql/src/test/results/clientpositive/vectorized_casts.q.out index e15ec55..640b15d 100644 --- ql/src/test/results/clientpositive/vectorized_casts.q.out +++ ql/src/test/results/clientpositive/vectorized_casts.q.out @@ -171,7 +171,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Execution mode: vectorized Stage: Stage-0 Fetch Operator @@ -325,29 +324,29 @@ where cbigint % 250 = 0 POSTHOOK: type: QUERY POSTHOOK: Input: default@alltypesorc #### A masked pattern was here #### -true true NULL true true true NULL false true NULL -36 -200 NULL -2006216750 -36 -200 NULL -14 NULL NULL -36 -36 -36 -36.0 -200.0 NULL -2.00621675E9 -36.0 -200.0 NULL -14.252 NULL 
NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1969-12-08 10:43:03.25 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL -36 -200 NULL -2006216750 -36.0 -200.0 NULL 0 1969-12-31 15:59:45.748 NULL -36.0 NULL 0.9917788534431158 NULL -true true NULL true true true NULL false true NULL -36 -200 NULL 1599879000 -36 -200 NULL -6 NULL NULL -36 -36 -36 -36.0 -200.0 NULL 1.599879E9 -36.0 -200.0 NULL -6.183 NULL NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1970-01-19 04:24:39 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL -36 -200 NULL 1599879000 -36.0 -200.0 NULL 0 1969-12-31 15:59:53.817 NULL -36.0 NULL 0.9917788534431158 NULL +true true NULL true true true NULL false true NULL -36 -200 NULL -2006216750 -36 -200 NULL -15 NULL NULL -36 -36 -36 -36.0 -200.0 NULL -2.00621675E9 -36.0 -200.0 NULL -14.252 NULL NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1969-12-08 10:43:03.25 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.748 NULL NULL -36 -200 NULL -2006216750 -36.0 -200.0 NULL 0 1969-12-31 15:59:45.748 NULL -36.0 NULL 0.9917788534431158 NULL +true true NULL true true true NULL false true NULL -36 -200 NULL 1599879000 -36 -200 NULL -7 NULL NULL -36 -36 -36 -36.0 -200.0 NULL 1.599879E9 -36.0 -200.0 NULL -6.183 NULL NULL NULL -200.0 1969-12-31 15:59:59.964 1969-12-31 15:59:59.8 NULL 1970-01-19 04:24:39 1969-12-31 15:59:24 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.817 NULL NULL -36 -200 NULL 1599879000 -36.0 -200.0 NULL 0 1969-12-31 15:59:53.817 NULL -36.0 NULL 0.9917788534431158 NULL true true NULL true true true NULL false true NULL -30 -200 NULL 1429852250 -30 -200 NULL 12 NULL NULL -30 -30 -30 -30.0 -200.0 NULL 1.42985225E9 -30.0 -200.0 NULL 12.935 NULL NULL NULL -200.0 1969-12-31 15:59:59.97 1969-12-31 15:59:59.8 NULL 1970-01-17 05:10:52.25 1969-12-31 15:59:30 1969-12-31 15:56:40 NULL 1969-12-31 16:00:00 1969-12-31 16:00:12.935 NULL NULL -30 -200 NULL 1429852250 -30.0 -200.0 NULL 0 1969-12-31 16:00:12.935 NULL -30.0 NULL 0.9880316240928618 NULL -true NULL true true true NULL false false true true -51 NULL 773600971 1053923250 -51 NULL 0 8 NULL 2 -51 -51 -51 -51.0 NULL 7.73600971E8 1.05392325E9 -51.0 NULL 0.0 8.451 NULL 2.0 7.7360096E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 773600971 1053923250 -51.0 NULL FALSE 0 1969-12-31 16:00:08.451 2yK4Bx76O -51.0 1.547201942E9 -0.6702291758433747 7.73600971E8 -true NULL true true true NULL true false true true -51 NULL 747553882 -1930467250 -51 NULL 1 8 NULL NULL -51 -51 -51 -51.0 NULL 7.47553882E8 -1.93046725E9 -51.0 NULL 1.0 8.451 NULL NULL 7.4755386E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 747553882 -1930467250 -51.0 NULL TRUE 0 1969-12-31 16:00:08.451 q8M86Fx0r -51.0 1.495107764E9 -0.6702291758433747 7.47553883E8 -true true NULL true true true NULL false true NULL 20 15601 NULL -362433250 20 15601 NULL -14 NULL NULL 20 20 20 20.0 15601.0 NULL -3.6243325E8 20.0 15601.0 NULL -14.871 NULL NULL NULL 15601.0 1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 
16:00:00 1969-12-31 15:59:45.129 NULL NULL 20 15601 NULL -362433250 20.0 15601.0 NULL 0 1969-12-31 15:59:45.129 NULL 20.0 NULL 0.9129452507276277 NULL -true true NULL true true true NULL false true NULL -38 15601 NULL -1858689000 -38 15601 NULL -1 NULL NULL -38 -38 -38 -38.0 15601.0 NULL -1.858689E9 -38.0 15601.0 NULL -1.386 NULL NULL NULL 15601.0 1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -38 15601 NULL -1858689000 -38.0 15601.0 NULL 0 1969-12-31 15:59:58.614 NULL -38.0 NULL -0.2963685787093853 NULL +true NULL true true true NULL false false true true -51 NULL 773600971 1053923250 -51 NULL 0 8 NULL 2 -51 -51 -51 -51.0 NULL 7.73600971E8 1.05392325E9 -51.0 NULL 0.0 8.451 NULL 2.0 7.7360096E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 14:53:20.971 1970-01-12 20:45:23.25 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 773600971 1053923250 -51.0 NULL FALSE 0 1969-12-31 16:00:08.451 2yK4Bx76O -51.0 1.547201942E9 -0.6702291758433747 7.7360096E8 +true NULL true true true NULL true false true true -51 NULL 747553882 -1930467250 -51 NULL 1 8 NULL NULL -51 -51 -51 -51.0 NULL 7.47553882E8 -1.93046725E9 -51.0 NULL 1.0 8.451 NULL NULL 7.4755386E8 NULL 1969-12-31 15:59:59.949 NULL 1970-01-09 07:39:13.882 1969-12-09 07:45:32.75 1969-12-31 15:59:09 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:08.451 NULL NULL -51 NULL 747553882 -1930467250 -51.0 NULL TRUE 0 1969-12-31 16:00:08.451 q8M86Fx0r -51.0 1.495107764E9 -0.6702291758433747 7.47553857E8 +true true NULL true true true NULL false true NULL 20 15601 NULL -362433250 20 15601 NULL -15 NULL NULL 20 20 20 20.0 15601.0 NULL -3.6243325E8 20.0 15601.0 NULL -14.871 NULL NULL NULL 15601.0 1969-12-31 16:00:00.02 1969-12-31 16:00:15.601 NULL 1969-12-27 11:19:26.75 1969-12-31 16:00:20 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:45.129 NULL NULL 20 15601 NULL -362433250 20.0 15601.0 NULL 0 1969-12-31 15:59:45.129 NULL 20.0 NULL 0.9129452507276277 NULL +true true NULL true true true NULL false true NULL -38 15601 NULL -1858689000 -38 15601 NULL -2 NULL NULL -38 -38 -38 -38.0 15601.0 NULL -1.858689E9 -38.0 15601.0 NULL -1.3860000000000001 NULL NULL NULL 15601.0 1969-12-31 15:59:59.962 1969-12-31 16:00:15.601 NULL 1969-12-10 03:41:51 1969-12-31 15:59:22 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:58.614 NULL NULL -38 15601 NULL -1858689000 -38.0 15601.0 NULL 0 1969-12-31 15:59:58.614 NULL -38.0 NULL -0.2963685787093853 NULL true true NULL true true true NULL false true NULL -5 15601 NULL 612416000 -5 15601 NULL 4 NULL NULL -5 -5 -5 -5.0 15601.0 NULL 6.12416E8 -5.0 15601.0 NULL 4.679 NULL NULL NULL 15601.0 1969-12-31 15:59:59.995 1969-12-31 16:00:15.601 NULL 1970-01-07 18:06:56 1969-12-31 15:59:55 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 16:00:04.679 NULL NULL -5 15601 NULL 612416000 -5.0 15601.0 NULL 0 1969-12-31 16:00:04.679 NULL -5.0 NULL 0.9589242746631385 NULL -true true NULL true true true NULL false true NULL 48 15601 NULL -795361000 48 15601 NULL -9 NULL NULL 48 48 48 48.0 15601.0 NULL -7.95361E8 48.0 15601.0 NULL -9.765 NULL NULL NULL 15601.0 1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL 48 15601 NULL -795361000 48.0 15601.0 NULL 0 1969-12-31 15:59:50.235 NULL 48.0 NULL 
-0.7682546613236668 NULL -true NULL true true true NULL false false true true 8 NULL -661621138 -931392750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -6.61621138E8 -9.3139275E8 8.0 NULL 0.0 15.892 NULL NULL -6.6162112E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -661621138 -931392750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 L15l8i5k558tBcDV20 8.0 -1.323242276E9 0.9893582466233818 -6.61621138E8 -true NULL true true true NULL false false true true 8 NULL -102936434 -1312782750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -1.02936434E8 -1.31278275E9 8.0 NULL 0.0 15.892 NULL NULL -1.02936432E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -102936434 -1312782750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 eJROSNhugc3kQR7Pb 8.0 -2.05872868E8 0.9893582466233818 -1.02936434E8 -true NULL true true true NULL false false true true 8 NULL 805179664 868161500 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL 8.05179664E8 8.681615E8 8.0 NULL 0.0 15.892 NULL NULL 8.0517965E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 805179664 868161500 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 e005B5q 8.0 1.610359328E9 0.9893582466233818 8.05179664E8 -true NULL true true true NULL false false true true 8 NULL -669632311 1588591250 8 NULL 0 15 NULL 3 8 8 8 8.0 NULL -6.69632311E8 1.58859125E9 8.0 NULL 0.0 15.892 NULL 3.0 -6.6963232E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -669632311 1588591250 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 3r3sDvfUkG0yTP3LnX5mNQRr 8.0 -1.339264622E9 0.9893582466233818 -6.69632311E8 -true NULL true true true NULL true false true true 8 NULL 890988972 -1862301000 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 8.90988972E8 -1.862301E9 8.0 NULL 1.0 15.892 NULL NULL 8.9098899E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 890988972 -1862301000 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 XylAH4 8.0 1.781977944E9 0.9893582466233818 8.90988973E8 -true NULL true true true NULL true false true true 8 NULL 930867246 1205399250 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 9.30867246E8 1.20539925E9 8.0 NULL 1.0 15.892 NULL NULL 9.3086726E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 930867246 1205399250 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 c1V8o1A 8.0 1.861734492E9 0.9893582466233818 9.30867247E8 +true true NULL true true true NULL false true NULL 48 15601 NULL -795361000 48 15601 NULL -10 NULL NULL 48 48 48 48.0 15601.0 NULL -7.95361E8 48.0 15601.0 NULL -9.765 NULL NULL NULL 15601.0 1969-12-31 16:00:00.048 1969-12-31 16:00:15.601 NULL 1969-12-22 11:03:59 1969-12-31 16:00:48 1969-12-31 20:20:01 NULL 1969-12-31 16:00:00 1969-12-31 15:59:50.235 NULL NULL 48 15601 NULL -795361000 48.0 15601.0 NULL 0 1969-12-31 15:59:50.235 NULL 48.0 NULL -0.7682546613236668 NULL +true NULL true true true NULL false false true 
true 8 NULL -661621138 -931392750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -6.61621138E8 -9.3139275E8 8.0 NULL 0.0 15.892 NULL NULL -6.6162112E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-24 00:12:58.862 1969-12-20 21:16:47.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -661621138 -931392750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 L15l8i5k558tBcDV20 8.0 -1.323242276E9 0.9893582466233818 -6.6162112E8 +true NULL true true true NULL false false true true 8 NULL -102936434 -1312782750 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL -1.02936434E8 -1.31278275E9 8.0 NULL 0.0 15.892 NULL NULL -1.02936432E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-30 11:24:23.566 1969-12-16 11:20:17.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -102936434 -1312782750 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 eJROSNhugc3kQR7Pb 8.0 -2.05872868E8 0.9893582466233818 -1.02936432E8 +true NULL true true true NULL false false true true 8 NULL 805179664 868161500 8 NULL 0 15 NULL NULL 8 8 8 8.0 NULL 8.05179664E8 8.681615E8 8.0 NULL 0.0 15.892 NULL NULL 8.0517965E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-09 23:39:39.664 1970-01-10 17:09:21.5 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 805179664 868161500 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 e005B5q 8.0 1.610359328E9 0.9893582466233818 8.05179648E8 +true NULL true true true NULL false false true true 8 NULL -669632311 1588591250 8 NULL 0 15 NULL 3 8 8 8 8.0 NULL -6.69632311E8 1.58859125E9 8.0 NULL 0.0 15.892 NULL 3.0 -6.6963232E8 NULL 1969-12-31 16:00:00.008 NULL 1969-12-23 21:59:27.689 1970-01-19 01:16:31.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL -669632311 1588591250 8.0 NULL FALSE 0 1969-12-31 16:00:15.892 3r3sDvfUkG0yTP3LnX5mNQRr 8.0 -1.339264622E9 0.9893582466233818 -6.6963232E8 +true NULL true true true NULL true false true true 8 NULL 890988972 -1862301000 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 8.90988972E8 -1.862301E9 8.0 NULL 1.0 15.892 NULL NULL 8.9098899E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-10 23:29:48.972 1969-12-10 02:41:39 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 890988972 -1862301000 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 XylAH4 8.0 1.781977944E9 0.9893582466233818 8.90988993E8 +true NULL true true true NULL true false true true 8 NULL 930867246 1205399250 8 NULL 1 15 NULL NULL 8 8 8 8.0 NULL 9.30867246E8 1.20539925E9 8.0 NULL 1.0 15.892 NULL NULL 9.3086726E8 NULL 1969-12-31 16:00:00.008 NULL 1970-01-11 10:34:27.246 1970-01-14 14:49:59.25 1969-12-31 16:00:08 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:15.892 NULL NULL 8 NULL 930867246 1205399250 8.0 NULL TRUE 0 1969-12-31 16:00:15.892 c1V8o1A 8.0 1.861734492E9 0.9893582466233818 9.30867265E8 true true NULL true true true NULL false true NULL -59 -7196 NULL -1604890000 -59 -7196 NULL 13 NULL NULL -59 -59 -59 -59.0 -7196.0 NULL -1.60489E9 -59.0 -7196.0 NULL 13.15 NULL NULL NULL -7196.0 1969-12-31 15:59:59.941 1969-12-31 15:59:52.804 NULL 1969-12-13 02:11:50 1969-12-31 15:59:01 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:13.15 NULL NULL -59 -7196 NULL -1604890000 -59.0 -7196.0 NULL 0 1969-12-31 16:00:13.15 NULL -59.0 NULL -0.6367380071391379 NULL -true true NULL true true true NULL false true NULL -21 -7196 NULL 1542429000 -21 -7196 NULL -4 NULL NULL -21 -21 
-21 -21.0 -7196.0 NULL 1.542429E9 -21.0 -7196.0 NULL -4.1 NULL NULL NULL -7196.0 1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -21 -7196 NULL 1542429000 -21.0 -7196.0 NULL 0 1969-12-31 15:59:55.9 NULL -21.0 NULL -0.8366556385360561 NULL -true true NULL true true true NULL false true NULL -60 -7196 NULL 1516314750 -60 -7196 NULL -7 NULL NULL -60 -60 -60 -60.0 -7196.0 NULL 1.51631475E9 -60.0 -7196.0 NULL -7.592 NULL NULL NULL -7196.0 1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -60 -7196 NULL 1516314750 -60.0 -7196.0 NULL 0 1969-12-31 15:59:52.408 NULL -60.0 NULL 0.3048106211022167 NULL +true true NULL true true true NULL false true NULL -21 -7196 NULL 1542429000 -21 -7196 NULL -5 NULL NULL -21 -21 -21 -21.0 -7196.0 NULL 1.542429E9 -21.0 -7196.0 NULL -4.1 NULL NULL NULL -7196.0 1969-12-31 15:59:59.979 1969-12-31 15:59:52.804 NULL 1970-01-18 12:27:09 1969-12-31 15:59:39 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:55.9 NULL NULL -21 -7196 NULL 1542429000 -21.0 -7196.0 NULL 0 1969-12-31 15:59:55.9 NULL -21.0 NULL -0.8366556385360561 NULL +true true NULL true true true NULL false true NULL -60 -7196 NULL 1516314750 -60 -7196 NULL -8 NULL NULL -60 -60 -60 -60.0 -7196.0 NULL 1.51631475E9 -60.0 -7196.0 NULL -7.592 NULL NULL NULL -7196.0 1969-12-31 15:59:59.94 1969-12-31 15:59:52.804 NULL 1970-01-18 05:11:54.75 1969-12-31 15:59:00 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:52.408 NULL NULL -60 -7196 NULL 1516314750 -60.0 -7196.0 NULL 0 1969-12-31 15:59:52.408 NULL -60.0 NULL 0.3048106211022167 NULL true true NULL true true true NULL false true NULL -14 -7196 NULL -1552199500 -14 -7196 NULL 11 NULL NULL -14 -14 -14 -14.0 -7196.0 NULL -1.5521995E9 -14.0 -7196.0 NULL 11.065 NULL NULL NULL -7196.0 1969-12-31 15:59:59.986 1969-12-31 15:59:52.804 NULL 1969-12-13 16:50:00.5 1969-12-31 15:59:46 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:11.065 NULL NULL -14 -7196 NULL -1552199500 -14.0 -7196.0 NULL 0 1969-12-31 16:00:11.065 NULL -14.0 NULL -0.9906073556948704 NULL true true NULL true true true NULL false true NULL 59 -7196 NULL -1137754500 59 -7196 NULL 10 NULL NULL 59 59 59 59.0 -7196.0 NULL -1.1377545E9 59.0 -7196.0 NULL 10.956 NULL NULL NULL -7196.0 1969-12-31 16:00:00.059 1969-12-31 15:59:52.804 NULL 1969-12-18 11:57:25.5 1969-12-31 16:00:59 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:10.956 NULL NULL 59 -7196 NULL -1137754500 59.0 -7196.0 NULL 0 1969-12-31 16:00:10.956 NULL 59.0 NULL 0.6367380071391379 NULL true true NULL true true true NULL false true NULL -8 -7196 NULL -1849991500 -8 -7196 NULL 3 NULL NULL -8 -8 -8 -8.0 -7196.0 NULL -1.8499915E9 -8.0 -7196.0 NULL 3.136 NULL NULL NULL -7196.0 1969-12-31 15:59:59.992 1969-12-31 15:59:52.804 NULL 1969-12-10 06:06:48.5 1969-12-31 15:59:52 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 16:00:03.136 NULL NULL -8 -7196 NULL -1849991500 -8.0 -7196.0 NULL 0 1969-12-31 16:00:03.136 NULL -8.0 NULL -0.9893582466233818 NULL true true NULL true true true NULL false true NULL 5 -7196 NULL -1015607500 5 -7196 NULL 10 NULL NULL 5 5 5 5.0 -7196.0 NULL -1.0156075E9 5.0 -7196.0 NULL 10.973 NULL NULL NULL -7196.0 1969-12-31 16:00:00.005 1969-12-31 15:59:52.804 NULL 1969-12-19 21:53:12.5 1969-12-31 16:00:05 1969-12-31 14:00:04 NULL 1969-12-31 
16:00:00 1969-12-31 16:00:10.973 NULL NULL 5 -7196 NULL -1015607500 5.0 -7196.0 NULL 0 1969-12-31 16:00:10.973 NULL 5.0 NULL -0.9589242746631385 NULL
-true true NULL true true true NULL false true NULL -24 -7196 NULL 829111000 -24 -7196 NULL -6 NULL NULL -24 -24 -24 -24.0 -7196.0 NULL 8.29111E8 -24.0 -7196.0 NULL -6.855 NULL NULL NULL -7196.0 1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -24 -7196 NULL 829111000 -24.0 -7196.0 NULL 0 1969-12-31 15:59:53.145 NULL -24.0 NULL 0.9055783620066238 NULL
-true true NULL true true true NULL false true NULL -50 -7196 NULL -1031187250 -50 -7196 NULL -5 NULL NULL -50 -50 -50 -50.0 -7196.0 NULL -1.03118725E9 -50.0 -7196.0 NULL -5.267 NULL NULL NULL -7196.0 1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL -50 -7196 NULL -1031187250 -50.0 -7196.0 NULL 0 1969-12-31 15:59:54.733 NULL -50.0 NULL 0.26237485370392877 NULL
-true NULL true true true NULL true false true true 11 NULL -64615982 1803053750 11 NULL 1 2 NULL 8 11 11 11 11.0 NULL -6.4615982E7 1.80305375E9 11.0 NULL 1.0 2.351 NULL 8.0 -6.4615984E7 NULL 1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -64615982 1803053750 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 8J5OB7K26PEV7kdbeHr3 11.0 -1.29231964E8 -0.9999902065507035 -6.4615981E7
-true NULL true true true NULL true false true true 11 NULL -335450417 1233327000 11 NULL 1 2 NULL NULL 11 11 11 11.0 NULL -3.35450417E8 1.233327E9 11.0 NULL 1.0 2.351 NULL NULL -3.35450432E8 NULL 1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -335450417 1233327000 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 dOYnqgaXoJ1P3ERwxe5N7 11.0 -6.70900834E8 -0.9999902065507035 -3.35450416E8
+true true NULL true true true NULL false true NULL -24 -7196 NULL 829111000 -24 -7196 NULL -7 NULL NULL -24 -24 -24 -24.0 -7196.0 NULL 8.29111E8 -24.0 -7196.0 NULL -6.855 NULL NULL NULL -7196.0 1969-12-31 15:59:59.976 1969-12-31 15:59:52.804 NULL 1970-01-10 06:18:31 1969-12-31 15:59:36 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:53.145 NULL NULL -24 -7196 NULL 829111000 -24.0 -7196.0 NULL 0 1969-12-31 15:59:53.145 NULL -24.0 NULL 0.9055783620066238 NULL
+true true NULL true true true NULL false true NULL -50 -7196 NULL -1031187250 -50 -7196 NULL -6 NULL NULL -50 -50 -50 -50.0 -7196.0 NULL -1.03118725E9 -50.0 -7196.0 NULL -5.267 NULL NULL NULL -7196.0 1969-12-31 15:59:59.95 1969-12-31 15:59:52.804 NULL 1969-12-19 17:33:32.75 1969-12-31 15:59:10 1969-12-31 14:00:04 NULL 1969-12-31 16:00:00 1969-12-31 15:59:54.733 NULL NULL -50 -7196 NULL -1031187250 -50.0 -7196.0 NULL 0 1969-12-31 15:59:54.733 NULL -50.0 NULL 0.26237485370392877 NULL
+true NULL true true true NULL true false true true 11 NULL -64615982 1803053750 11 NULL 1 2 NULL 8 11 11 11 11.0 NULL -6.4615982E7 1.80305375E9 11.0 NULL 1.0 2.351 NULL 8.0 -6.4615984E7 NULL 1969-12-31 16:00:00.011 NULL 1969-12-30 22:03:04.018 1970-01-21 12:50:53.75 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -64615982 1803053750 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 8J5OB7K26PEV7kdbeHr3 11.0 -1.29231964E8 -0.9999902065507035 -6.4615983E7
+true NULL true true true NULL true false true true 11 NULL -335450417 1233327000 11 NULL 1 2 NULL NULL 11 11 11 11.0 NULL -3.35450417E8 1.233327E9 11.0 NULL 1.0 2.351 NULL NULL -3.35450432E8 NULL 1969-12-31 16:00:00.011 NULL 1969-12-27 18:49:09.583 1970-01-14 22:35:27 1969-12-31 16:00:11 NULL 1969-12-31 16:00:00.001 1969-12-31 16:00:00 1969-12-31 16:00:02.351 NULL NULL 11 NULL -335450417 1233327000 11.0 NULL TRUE 0 1969-12-31 16:00:02.351 dOYnqgaXoJ1P3ERwxe5N7 11.0 -6.70900834E8 -0.9999902065507035 -3.35450431E8
diff --git ql/src/test/results/clientpositive/vectorized_ptf.q.out ql/src/test/results/clientpositive/vectorized_ptf.q.out
index 202ec40..7b5ff1b 100644
--- ql/src/test/results/clientpositive/vectorized_ptf.q.out
+++ ql/src/test/results/clientpositive/vectorized_ptf.q.out
@@ -259,7 +259,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -279,7 +279,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -605,7 +605,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -625,7 +625,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -942,7 +942,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -962,7 +962,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1193,7 +1193,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1213,7 +1213,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1522,7 +1522,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1542,7 +1542,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -1861,7 +1861,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1881,7 +1881,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2212,7 +2212,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2232,7 +2232,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2341,7 +2341,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2361,7 +2361,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2559,7 +2559,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2579,7 +2579,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2688,7 +2688,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2708,7 +2708,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -2917,7 +2917,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2937,7 +2937,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -3235,7 +3235,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -3255,7 +3255,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -3555,7 +3555,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -3575,7 +3575,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -3885,7 +3885,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -3905,7 +3905,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4299,7 +4299,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4319,7 +4319,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4726,7 +4726,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4746,7 +4746,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -4855,7 +4855,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -4875,7 +4875,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -5133,7 +5133,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -5153,7 +5153,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -5452,7 +5452,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -5472,7 +5472,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -6003,7 +6003,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -6023,7 +6023,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -6643,7 +6643,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -6663,7 +6663,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -7088,7 +7088,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -7108,7 +7108,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -7576,7 +7576,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -7596,7 +7596,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -8014,7 +8014,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -8034,7 +8034,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -8547,7 +8547,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -8567,7 +8567,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
@@ -8984,7 +8984,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -9004,7 +9004,7 @@ STAGE PLANS:
 serialization.ddl struct part { i32 p_partkey, string p_name, string p_mfgr, string p_brand, string p_type, i32 p_size, string p_container, double p_retailprice, string p_comment}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2571
+ totalSize 2525
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
 name: default.part
diff --git ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index 89ea70d..cd53491 100644
--- ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -161,46 +161,46 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
-NULL NULL NULL NULL NULL NULL NULL NULL NULL
-28784 1969 12 31 31 1 23 59 44
-28784 1969 12 31 31 1 23 59 44
-28784 1969 12 31 31 1 23 59 44
-28784 1969 12 31 31 1 23 59 44
-28785 1969 12 31 31 1 23 59 45
-28786 1969 12 31 31 1 23 59 46
-28787 1969 12 31 31 1 23 59 47
-28788 1969 12 31 31 1 23 59 48
-28789 1969 12 31 31 1 23 59 49
-28789 1969 12 31 31 1 23 59 49
-28790 1969 12 31 31 1 23 59 50
-28792 1969 12 31 31 1 23 59 52
-28792 1969 12 31 31 1 23 59 52
-28792 1969 12 31 31 1 23 59 52
-28792 1969 12 31 31 1 23 59 52
-28795 1969 12 31 31 1 23 59 55
-28795 1969 12 31 31 1 23 59 55
-28795 1969 12 31 31 1 23 59 55
-28798 1969 12 31 31 1 23 59 58
-28798 1969 12 31 31 1 23 59 58
-28800 1970 1 1 1 1 0 0 0
-28800 1970 1 1 1 1 0 0 0
-28802 1970 1 1 1 1 0 0 2
-28803 1970 1 1 1 1 0 0 3
-28804 1970 1 1 1 1 0 0 4
-28804 1970 1 1 1 1 0 0 4
-28805 1970 1 1 1 1 0 0 5
-28805 1970 1 1 1 1 0 0 5
-28806 1970 1 1 1 1 0 0 6
-28807 1970 1 1 1 1 0 0 7
-28807 1970 1 1 1 1 0 0 7
-28807 1970 1 1 1 1 0 0 7
-28808 1970 1 1 1 1 0 0 8
-28808 1970 1 1 1 1 0 0 8
-28809 1970 1 1 1 1 0 0 9
-28811 1970 1 1 1 1 0 0 11
-28813 1970 1 1 1 1 0 0 13
-28814 1970 1 1 1 1 0 0 14
-28815 1970 1 1 1 1 0 0 15
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0
 PREHOOK: query: EXPLAIN SELECT
   to_unix_timestamp(stimestamp1) AS c1,
   year(stimestamp1),
@@ -436,45 +436,45 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_string
 #### A masked pattern was here ####
 NULL NULL NULL NULL NULL NULL NULL NULL NULL
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
-true true true true true true true true true
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true true
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true true
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false false false false
+false false false false false false true true false
+false false false false false false true true false
+false false false false false false false false false
+false false false false false false true true false
 PREHOOK: query: -- Wrong format. Should all be NULL.
 EXPLAIN SELECT
   to_unix_timestamp(stimestamp1) AS c1,