diff --git a/orc/src/test/org/apache/orc/impl/TestRecordReaderImpl.java b/orc/src/test/org/apache/orc/impl/TestRecordReaderImpl.java index cdd62ac..30b42ee 100644 --- a/orc/src/test/org/apache/orc/impl/TestRecordReaderImpl.java +++ b/orc/src/test/org/apache/orc/impl/TestRecordReaderImpl.java @@ -76,7 +76,7 @@ public static PredicateLeaf createPredicateLeaf(PredicateLeaf.Operator operator, Object literal, List literalList) { return new SearchArgumentImpl.PredicateLeafImpl(operator, type, columnName, - literal, literalList); + literal, literalList, null); } // can add .verboseLogging() to cause Mockito to log invocations diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java index 69ba4a2..669e23e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractMapJoinOperator.java @@ -70,7 +70,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { if (conf.getGenJoinKeys()) { int tagLen = conf.getTagLength(); joinKeys = new List[tagLen]; - JoinUtil.populateJoinKeyValue(joinKeys, conf.getKeys(), NOTSKIPBIGTABLE); + JoinUtil.populateJoinKeyValue(joinKeys, conf.getKeys(), NOTSKIPBIGTABLE, hconf); joinKeysObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinKeys, inputObjInspectors,NOTSKIPBIGTABLE, tagLen); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java index 5512ee2..9ef94ee 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java @@ -218,11 +218,11 @@ protected void initializeOp(Configuration hconf) throws HiveException { noOuterJoin = conf.isNoOuterJoin(); totalSz = JoinUtil.populateJoinKeyValue(joinValues, conf.getExprs(), - order,NOTSKIPBIGTABLE); + 
order,NOTSKIPBIGTABLE, hconf); //process join filters joinFilters = new List[tagLen]; - JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(),order,NOTSKIPBIGTABLE); + JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(),order,NOTSKIPBIGTABLE, hconf); joinValuesObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinValues, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DynamicValueRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DynamicValueRegistry.java new file mode 100644 index 0000000..63336bd --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DynamicValueRegistry.java @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec; + +public interface DynamicValueRegistry { + + // Abstract class to hold info required for the implementation + public static abstract class RegistryConf { + } + + Object getValue(String key) throws Exception; + + void init(RegistryConf conf) throws Exception; +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java index 24c8281..b0384df 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -38,8 +39,8 @@ private transient StructField[] fields; private transient boolean[] unionField; - public ExprNodeColumnEvaluator(ExprNodeColumnDesc expr) { - super(expr); + public ExprNodeColumnEvaluator(ExprNodeColumnDesc expr, Configuration conf) { + super(expr, conf); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java index 89a75eb..f53c3e3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantDefaultEvaluator.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; @@ -33,7 +34,11 @@ transient ObjectInspector writableObjectInspector; public 
ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr) { - super(expr); + this(expr, null); + } + + public ExprNodeConstantDefaultEvaluator(ExprNodeConstantDefaultDesc expr, Configuration conf) { + super(expr, conf); writableObjectInspector = expr.getWritableObjectInspector(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java index 4fe72a0..ca39e21 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector; @@ -32,7 +33,11 @@ transient ConstantObjectInspector writableObjectInspector; public ExprNodeConstantEvaluator(ExprNodeConstantDesc expr) { - super(expr); + this(expr, null); + } + + public ExprNodeConstantEvaluator(ExprNodeConstantDesc expr, Configuration conf) { + super(expr, conf); writableObjectInspector = expr.getWritableObjectInspector(); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeDynamicValueEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeDynamicValueEvaluator.java new file mode 100644 index 0000000..413fc2a --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeDynamicValueEvaluator.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.io.sarg.LiteralDelegate; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicValueDesc; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; + +/** + * ExprNodeDynamicValueEvaluator.
+ * + */ +public class ExprNodeDynamicValueEvaluator extends ExprNodeEvaluator { + + transient ObjectInspector oi; + + public ExprNodeDynamicValueEvaluator(ExprNodeDynamicValueDesc expr, Configuration conf) { + super(expr, conf); + oi = ObjectInspectorUtils.getStandardObjectInspector(expr.getWritableObjectInspector(), ObjectInspectorCopyOption.JAVA); + } + + @Override + public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException { + return oi; + } + + @Override + protected Object _evaluate(Object row, int version) throws HiveException { + LiteralDelegate literal = expr.getValue(); + literal.setConf(conf); + return ((PrimitiveObjectInspector) oi).getPrimitiveWritableObject(literal.getLiteral()); + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java index b8d6ab7..375d65f 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -30,9 +31,11 @@ protected final T expr; protected ObjectInspector outputOI; + protected Configuration conf; - public ExprNodeEvaluator(T expr) { + public ExprNodeEvaluator(T expr, Configuration conf) { this.expr = expr; + this.conf = conf; } /** @@ -109,4 +112,12 @@ public boolean isStateful() { public String toString() { return "ExprNodeEvaluator[" + expr + "]"; } + + public Configuration getConf() { + return conf; + } + + public void setConf(Configuration conf) { + this.conf = conf; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java index 0d03d8f..34aec55 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java @@ -21,11 +21,13 @@ import java.util.HashMap; import java.util.Map; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDefaultDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicValueDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; @@ -39,9 +41,13 @@ private ExprNodeEvaluatorFactory() { } public static ExprNodeEvaluator get(ExprNodeDesc desc) throws HiveException { + return get(desc, null); + } + + public static ExprNodeEvaluator get(ExprNodeDesc desc, Configuration conf) throws HiveException { // Constant node if (desc instanceof ExprNodeConstantDesc) { - return new ExprNodeConstantEvaluator((ExprNodeConstantDesc) desc); + return new ExprNodeConstantEvaluator((ExprNodeConstantDesc) desc, conf); } // Special 'default' constant node @@ -51,15 +57,19 @@ public static ExprNodeEvaluator get(ExprNodeDesc desc) throws HiveException { // Column-reference node, e.g. a column in the input row if (desc instanceof ExprNodeColumnDesc) { - return new ExprNodeColumnEvaluator((ExprNodeColumnDesc) desc); + return new ExprNodeColumnEvaluator((ExprNodeColumnDesc) desc, conf); } // Generic Function node, e.g. CASE, an operator or a UDF node if (desc instanceof ExprNodeGenericFuncDesc) { - return new ExprNodeGenericFuncEvaluator((ExprNodeGenericFuncDesc) desc); + return new ExprNodeGenericFuncEvaluator((ExprNodeGenericFuncDesc) desc, conf); } // Field node, e.g. 
get a.myfield1 from a if (desc instanceof ExprNodeFieldDesc) { - return new ExprNodeFieldEvaluator((ExprNodeFieldDesc) desc); + return new ExprNodeFieldEvaluator((ExprNodeFieldDesc) desc, conf); + } + // Dynamic value which will be determined during query runtime + if (desc instanceof ExprNodeDynamicValueDesc) { + return new ExprNodeDynamicValueEvaluator((ExprNodeDynamicValueDesc) desc, conf); } throw new RuntimeException( "Cannot find ExprNodeEvaluator for the exprNodeDesc = " + desc); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java index 42685fb..991bc13 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java @@ -30,7 +30,7 @@ private final ExprNodeEvaluator referencing; public ExprNodeEvaluatorHead(ExprNodeEvaluator referencing) { - super(referencing.getExpr()); + super(referencing.getExpr(), referencing.getConf()); this.referencing = referencing; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java index 0a6b66a..625d486 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java @@ -30,7 +30,7 @@ private final ExprNodeEvaluator referencing; public ExprNodeEvaluatorRef(ExprNodeEvaluator referencing) { - super(referencing.getExpr()); + super(referencing.getExpr(), referencing.getConf()); this.referencing = referencing; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java index ff32626..1241343 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java @@ 
-21,6 +21,7 @@ import java.util.ArrayList; import java.util.List; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; @@ -43,9 +44,9 @@ transient ObjectInspector structFieldObjectInspector; transient ObjectInspector resultObjectInspector; - public ExprNodeFieldEvaluator(ExprNodeFieldDesc desc) throws HiveException { - super(desc); - leftEvaluator = ExprNodeEvaluatorFactory.get(desc.getDesc()); + public ExprNodeFieldEvaluator(ExprNodeFieldDesc desc, Configuration conf) throws HiveException { + super(desc, conf); + leftEvaluator = ExprNodeEvaluatorFactory.get(desc.getDesc(), conf); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java index 221abd9..8b9baa6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java @@ -20,6 +20,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; @@ -91,13 +92,13 @@ public Object get() throws HiveException { } } - public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) throws HiveException { - super(expr); + public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr, Configuration conf) throws HiveException { + super(expr, conf); children = new ExprNodeEvaluator[expr.getChildren().size()]; isEager = false; for (int i = 0; i < children.length; i++) { ExprNodeDesc child = expr.getChildren().get(i); - ExprNodeEvaluator nodeEvaluator = ExprNodeEvaluatorFactory.get(child); + 
ExprNodeEvaluator nodeEvaluator = ExprNodeEvaluatorFactory.get(child, conf); children[i] = nodeEvaluator; // If we have eager evaluators anywhere below us, then we are eager too. if (nodeEvaluator instanceof ExprNodeGenericFuncEvaluator) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java index bd0d28c..df30ab2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java @@ -60,7 +60,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { try { heartbeatInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVESENDHEARTBEAT); - conditionEvaluator = ExprNodeEvaluatorFactory.get(conf.getPredicate()); + conditionEvaluator = ExprNodeEvaluatorFactory.get(conf.getPredicate(), hconf); if (HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVEEXPREVALUATIONCACHE)) { conditionEvaluator = ExprNodeEvaluatorFactory.toCachedEval(conditionEvaluator); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java index f28d33e..8b8a17d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java @@ -206,7 +206,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { keyObjectInspectors = new ObjectInspector[numKeys]; currentKeyObjectInspectors = new ObjectInspector[numKeys]; for (int i = 0; i < numKeys; i++) { - keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i)); + keyFields[i] = ExprNodeEvaluatorFactory.get(conf.getKeys().get(i), hconf); keyObjectInspectors[i] = keyFields[i].initialize(rowInspector); currentKeyObjectInspectors[i] = ObjectInspectorUtils .getStandardObjectInspector(keyObjectInspectors[i], @@ -252,7 +252,7 @@ protected void initializeOp(Configuration hconf) throws 
HiveException { new ExprNodeColumnDesc(TypeInfoUtils.getTypeInfoFromObjectInspector( sf.getFieldObjectInspector()), keyField.getFieldName() + "." + sf.getFieldName(), null, - false)); + false), hconf); unionExprEval.initialize(rowInspector); } } @@ -277,7 +277,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { aggregationParameterObjects[i] = new Object[parameters.size()]; for (int j = 0; j < parameters.size(); j++) { aggregationParameterFields[i][j] = ExprNodeEvaluatorFactory - .get(parameters.get(j)); + .get(parameters.get(j), hconf); aggregationParameterObjectInspectors[i][j] = aggregationParameterFields[i][j] .initialize(rowInspector); if (unionExprEval != null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java index ac5331e..3a366f6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java @@ -143,19 +143,19 @@ protected void initializeOp(Configuration hconf) throws HiveException { // process join keys joinKeys = new List[tagLen]; - JoinUtil.populateJoinKeyValue(joinKeys, conf.getKeys(), posBigTableAlias); + JoinUtil.populateJoinKeyValue(joinKeys, conf.getKeys(), posBigTableAlias, hconf); joinKeysObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinKeys, inputObjInspectors, posBigTableAlias, tagLen); // process join values joinValues = new List[tagLen]; - JoinUtil.populateJoinKeyValue(joinValues, conf.getExprs(), posBigTableAlias); + JoinUtil.populateJoinKeyValue(joinValues, conf.getExprs(), posBigTableAlias, hconf); joinValuesObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinValues, inputObjInspectors, posBigTableAlias, tagLen); // process join filters joinFilters = new List[tagLen]; - JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(), posBigTableAlias); + 
JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(), posBigTableAlias, hconf); joinFilterObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinFilters, inputObjInspectors, posBigTableAlias, tagLen); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java index 6cbcab6..7a78d7c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java @@ -121,14 +121,14 @@ } public static int populateJoinKeyValue(List[] outMap, - Map> inputMap, int posBigTableAlias) throws HiveException { - return populateJoinKeyValue(outMap, inputMap, null, posBigTableAlias); + Map> inputMap, int posBigTableAlias, Configuration conf) throws HiveException { + return populateJoinKeyValue(outMap, inputMap, null, posBigTableAlias, conf); } public static int populateJoinKeyValue(List[] outMap, Map> inputMap, Byte[] order, - int posBigTableAlias) throws HiveException { + int posBigTableAlias, Configuration conf) throws HiveException { int total = 0; for (Entry> e : inputMap.entrySet()) { if (e.getValue() == null) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java index 9049ddd..a30c771 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java @@ -63,7 +63,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { eval = new ExprNodeEvaluator[colList.size()]; for (int i = 0; i < colList.size(); i++) { assert (colList.get(i) != null); - eval[i] = ExprNodeEvaluatorFactory.get(colList.get(i)); + eval[i] = ExprNodeEvaluatorFactory.get(colList.get(i), hconf); } if (HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVEEXPREVALUATIONCACHE)) { eval = ExprNodeEvaluatorFactory.toCachedEvals(eval); diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java new file mode 100644 index 0000000..a8d197f --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DynamicValueRegistryTez.java @@ -0,0 +1,127 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec.tez; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; +import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory; +import org.apache.hadoop.hive.ql.exec.DynamicValueRegistry; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.BaseWork; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.TableDesc; +import org.apache.hadoop.hive.serde2.Deserializer; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.util.ReflectionUtils; +import org.apache.tez.runtime.api.Input; +import org.apache.tez.runtime.api.LogicalInput; +import org.apache.tez.runtime.api.ProcessorContext; +import org.apache.tez.runtime.library.api.KeyValueReader; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DynamicValueRegistryTez implements DynamicValueRegistry { + private static final Logger LOG = LoggerFactory.getLogger(DynamicValueRegistryTez.class); + + public static class RegistryConfTez extends RegistryConf { + public Configuration conf; + public BaseWork baseWork; + public ProcessorContext processorContext; + public Map inputs; + + public RegistryConfTez(Configuration conf, BaseWork baseWork, + ProcessorContext processorContext, Map inputs) { + super(); + this.conf = conf; + this.baseWork = baseWork; + this.processorContext = processorContext; + this.inputs = inputs; + } + } + + public HashMap values = new HashMap(); + + public DynamicValueRegistryTez() { + } + + public Object getValue(String key) { + if (!values.containsKey(key)) { + throw new IllegalStateException("Value does not exist in registry: " + key); + } + 
return values.get(key); + } + + @Override + public void init(RegistryConf conf) throws Exception { + RegistryConfTez rct = (RegistryConfTez) conf; + + for (String inputSourceName : rct.baseWork.getInputSourceToTableDescMap().keySet()) { + LOG.info("Runtime value source: " + inputSourceName); + + LogicalInput runtimeValueInput = rct.inputs.get(inputSourceName); + TableDesc inputTableDesc = rct.baseWork.getInputSourceToTableDescMap().get(inputSourceName); + List colExprs = rct.baseWork.getInputSourceToColMap().get(inputSourceName); + List dynamicValueIDs = rct.baseWork.getInputSourceToDynamicValueIDs().get(inputSourceName); + + // Setup deserializer/obj inspectors for the incoming data source + Deserializer deserializer = ReflectionUtils.newInstance(inputTableDesc.getDeserializerClass(), null); + deserializer.initialize(rct.conf, inputTableDesc.getProperties()); + ObjectInspector inspector = deserializer.getObjectInspector(); + + // Set up col expressions for the dynamic values using this input + List colExprEvaluators = new ArrayList(); + for (ExprNodeDesc expr : colExprs) { + ExprNodeEvaluator exprEval = ExprNodeEvaluatorFactory.get(expr, null); + exprEval.initialize(inspector); + colExprEvaluators.add(exprEval); + } + + runtimeValueInput.start(); + List inputList = new ArrayList(); + inputList.add(runtimeValueInput); + rct.processorContext.waitForAllInputsReady(inputList); + + KeyValueReader kvReader = (KeyValueReader) runtimeValueInput.getReader(); + long rowCount = 0; + while (kvReader.next()) { + Object row = deserializer.deserialize((Writable) kvReader.getCurrentValue()); + rowCount++; + for (int colIdx = 0; colIdx < colExprEvaluators.size(); ++colIdx) { + // Read each expression and save it to the value registry + ExprNodeEvaluator eval = colExprEvaluators.get(colIdx); + Object val = eval.evaluate(row); + String dynamicValueID = dynamicValueIDs.get(colIdx); + values.put(dynamicValueID, val); + } + } + // For now, expecting a single row (min/max, aggregated 
bloom filter) + if (rowCount != 1) { + throw new IllegalStateException("Expected 1 row from " + inputSourceName + ", got " + rowCount); + } + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java index 6f36dfb..2a73cca 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/MapRecordProcessor.java @@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.ReportStats; import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; +import org.apache.hadoop.hive.ql.exec.tez.DynamicValueRegistryTez.RegistryConfTez; import org.apache.hadoop.hive.ql.exec.tez.TezProcessor.TezKVOutputCollector; import org.apache.hadoop.hive.ql.exec.tez.tools.KeyValueInputMerger; import org.apache.hadoop.hive.ql.exec.vector.VectorMapOperator; @@ -285,6 +286,12 @@ public Object call() { mapOp.initializeLocalWork(jconf); + // Setup values registry + checkAbortCondition(); + mapWork.valueRegistry = new DynamicValueRegistryTez(); + RegistryConfTez registryConf = new RegistryConfTez(jconf, mapWork, processorContext, inputs); + mapWork.valueRegistry.init(registryConf); + checkAbortCondition(); initializeMapRecordSources(); mapOp.initializeMapOperator(jconf); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java index cf3c8ab..db9000b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordProcessor.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.OperatorUtils; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.ReportStats; +import 
org.apache.hadoop.hive.ql.exec.tez.DynamicValueRegistryTez.RegistryConfTez; import org.apache.hadoop.hive.ql.exec.tez.TezProcessor.TezKVOutputCollector; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -148,6 +149,13 @@ void init( reducer = reduceWork.getReducer(); // Check immediately after reducer is assigned, in cae the abort came in during checkAbortCondition(); + + // Setup values registry + reduceWork.valueRegistry = new DynamicValueRegistryTez(); + RegistryConfTez registryConf = new RegistryConfTez(jconf, reduceWork, processorContext, inputs); + reduceWork.valueRegistry.init(registryConf); + checkAbortCondition(); + if (numTags > 1) { sources = new ReduceRecordSource[numTags]; mainWorkOIs = new ObjectInspector[numTags]; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java index 0cb6c8a..848fc8e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java @@ -153,7 +153,7 @@ public void assign(VectorExpressionWriter[] writers, List oids) VectorExpression vectorExpr = bigTableValueExpressions[i]; // This is a vectorized aware evaluator - ExprNodeEvaluator eval = new ExprNodeEvaluator(desc) { + ExprNodeEvaluator eval = new ExprNodeEvaluator(desc, hconf) { int columnIndex; int writerIndex; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java index 80b0a14..ac3363e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java @@ -207,7 +207,7 @@ public void assign(VectorExpressionWriter[] writers, List oids) VectorExpression vectorExpr = 
bigTableValueExpressions[i]; // This is a vectorized aware evaluator - ExprNodeEvaluator eval = new ExprNodeEvaluator(desc) { + ExprNodeEvaluator eval = new ExprNodeEvaluator(desc, hconf) { int columnIndex;; int writerIndex; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java index 9013084..b2ebd7e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java @@ -27,9 +27,11 @@ import org.apache.hadoop.hive.common.type.HiveChar; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; +import org.apache.hadoop.hive.ql.io.sarg.LiteralDelegate; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; +import org.apache.hadoop.hive.ql.plan.ExprNodeDynamicValueDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween; @@ -58,14 +60,16 @@ public class ConvertAstToSearchArg { private static final Logger LOG = LoggerFactory.getLogger(ConvertAstToSearchArg.class); - private final SearchArgument.Builder builder = - SearchArgumentFactory.newBuilder(); + private final SearchArgument.Builder builder; + private final Configuration conf; /** * Builds the expression and leaf list from the original predicate. * @param expression the expression to translate. 
*/ - ConvertAstToSearchArg(ExprNodeGenericFuncDesc expression) { + ConvertAstToSearchArg(Configuration conf, ExprNodeGenericFuncDesc expression) { + this.conf = conf; + builder = SearchArgumentFactory.newBuilder(conf); parse(expression); } @@ -178,7 +182,7 @@ private static Object boxLiteral(ExprNodeConstantDesc constantDesc, * @param type the type of the expression * @return the literal boxed if found or null */ - private static Object findLiteral(ExprNodeGenericFuncDesc expr, + private static Object findLiteral(Configuration conf, ExprNodeGenericFuncDesc expr, PredicateLeaf.Type type) { List children = expr.getChildren(); if (children.size() != 2) { @@ -186,16 +190,29 @@ private static Object findLiteral(ExprNodeGenericFuncDesc expr, } Object result = null; for(ExprNodeDesc child: children) { - if (child instanceof ExprNodeConstantDesc) { + Object currentResult = getLiteral(conf, child, type); + if (currentResult != null) { + // Both children in the expression should not be literal if (result != null) { return null; } - result = boxLiteral((ExprNodeConstantDesc) child, type); + result = currentResult; } } return result; } + private static Object getLiteral(Configuration conf, ExprNodeDesc child, PredicateLeaf.Type type) { + if (child instanceof ExprNodeConstantDesc) { + return boxLiteral((ExprNodeConstantDesc) child, type); + } else if (child instanceof ExprNodeDynamicValueDesc) { + LiteralDelegate value = ((ExprNodeDynamicValueDesc) child).getValue(); + value.setConf(conf); + return value; + } + return null; + } + /** * Return the boxed literal at the given position * @param expr the parent node @@ -203,15 +220,12 @@ private static Object findLiteral(ExprNodeGenericFuncDesc expr, * @param position the child position to check * @return the boxed literal if found otherwise null */ - private static Object getLiteral(ExprNodeGenericFuncDesc expr, + private static Object getLiteral(Configuration conf, ExprNodeGenericFuncDesc expr, PredicateLeaf.Type type, int 
position) { List children = expr.getChildren(); - Object child = children.get(position); - if (child instanceof ExprNodeConstantDesc) { - return boxLiteral((ExprNodeConstantDesc) child, type); - } - return null; + ExprNodeDesc child = children.get(position); + return getLiteral(conf, child, type); } private static Object[] getLiteralList(ExprNodeGenericFuncDesc expr, @@ -268,16 +282,16 @@ private void createLeaf(PredicateLeaf.Operator operator, builder.isNull(columnName, type); break; case EQUALS: - builder.equals(columnName, type, findLiteral(expression, type)); + builder.equals(columnName, type, findLiteral(conf, expression, type)); break; case NULL_SAFE_EQUALS: - builder.nullSafeEquals(columnName, type, findLiteral(expression, type)); + builder.nullSafeEquals(columnName, type, findLiteral(conf, expression, type)); break; case LESS_THAN: - builder.lessThan(columnName, type, findLiteral(expression, type)); + builder.lessThan(columnName, type, findLiteral(conf, expression, type)); break; case LESS_THAN_EQUALS: - builder.lessThanEquals(columnName, type, findLiteral(expression, type)); + builder.lessThanEquals(columnName, type, findLiteral(conf, expression, type)); break; case IN: builder.in(columnName, type, @@ -285,8 +299,8 @@ private void createLeaf(PredicateLeaf.Operator operator, break; case BETWEEN: builder.between(columnName, type, - getLiteral(expression, type, variable + 1), - getLiteral(expression, type, variable + 2)); + getLiteral(conf, expression, type, variable + 1), + getLiteral(conf, expression, type, variable + 2)); break; } } catch (Exception e) { @@ -421,8 +435,8 @@ private void parse(ExprNodeDesc expression) { public static final String SARG_PUSHDOWN = "sarg.pushdown"; - public static SearchArgument create(ExprNodeGenericFuncDesc expression) { - return new ConvertAstToSearchArg(expression).buildSearchArgument(); + public static SearchArgument create(Configuration conf, ExprNodeGenericFuncDesc expression) { + return new ConvertAstToSearchArg(conf, 
expression).buildSearchArgument(); } @@ -441,7 +455,7 @@ public static SearchArgument create(byte[] kryoBytes) { public static SearchArgument createFromConf(Configuration conf) { String sargString; if ((sargString = conf.get(TableScanDesc.FILTER_EXPR_CONF_STR)) != null) { - return create(SerializationUtilities.deserializeExpression(sargString)); + return create(conf, SerializationUtilities.deserializeExpression(sargString)); } else if ((sargString = conf.get(SARG_PUSHDOWN)) != null) { return create(sargString); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java index 9e9beb0..b853a06 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java @@ -135,7 +135,7 @@ protected void generatePredicate(NodeProcessorCtx procCtx, return; } // the sargs are closely tied to hive.optimize.index.filter - SearchArgument sarg = ConvertAstToSearchArg.create(filter); + SearchArgument sarg = ConvertAstToSearchArg.create(ctxt.pctx.getConf(), filter); if (sarg == null) { return; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java index 13a0811..9d7bd33 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseWork.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.plan; import java.util.ArrayList; +import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.LinkedHashSet; import java.util.List; @@ -31,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.HashTableDummyOperator; import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.tez.DynamicValueRegistryTez; import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.hive.ql.plan.Explain.Level; @@ -76,6 +78,16 @@ public BaseWork(String name) { private int reservedMemoryMB = -1; // default to -1 means we leave it up to Tez to decide + public transient DynamicValueRegistryTez valueRegistry = null; + + // Used for value registry + private Map inputSourceToTableDescMap = + new LinkedHashMap(); + private Map> inputSourceToDynamicValueIDs = + new LinkedHashMap>(); + private Map> inputSourceToColMap = + new LinkedHashMap>(); + public void setGatheringStats(boolean gatherStats) { this.gatheringStats = gatherStats; } @@ -251,4 +263,31 @@ public void addSortCols(List sortCols) { public List getSortCols() { return sortColNames; } + + public Map getInputSourceToTableDescMap() { + return inputSourceToTableDescMap; + } + + public void setInputSourceToTableDescMap( + Map inputSourceToTableDescMap) { + this.inputSourceToTableDescMap = inputSourceToTableDescMap; + } + + public Map> getInputSourceToDynamicValueIDs() { + return inputSourceToDynamicValueIDs; + } + + public void setInputSourceToDynamicValueIDs( + Map> inputSourceToDynamicValueIDs) { + this.inputSourceToDynamicValueIDs = inputSourceToDynamicValueIDs; + } + + public Map> getInputSourceToColMap() { + return inputSourceToColMap; + } + + public void setInputSourceToColMap( + Map> inputSourceToColMap) { + this.inputSourceToColMap = inputSourceToColMap; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicValue.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicValue.java new file mode 100644 index 0000000..5b281d8 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DynamicValue.java @@ -0,0 +1,76 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.io.sarg.LiteralDelegate; + +public class DynamicValue implements LiteralDelegate { + + protected transient Configuration conf; + + protected String id; + + transient protected Object val; + transient boolean initialized = false; + + public DynamicValue(String id) { + this.id = id; + } + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + } + + @Override + public Configuration getConf() { + return conf; + } + + @Override + public Object getLiteral() { + if (initialized) { + return val; + } + + if (conf == null) { + throw new IllegalStateException("Cannot retrieve dynamic value " + id + " - no conf set"); + } + // Is there a way to tell whether to use MapWork or ReduceWork? + BaseWork baseWork = Utilities.getMapWork(conf); + if (baseWork == null) { + baseWork = Utilities.getReduceWork(conf); + } + if (baseWork == null) { + throw new IllegalStateException("Cannot retrieve dynamic value " + id + " - BaseWork not found"); + } + + val = baseWork.valueRegistry.getValue(id); + initialized = true; + return val; + } + + @Override + public String toString() { + // If the id is a generated unique ID then this could affect .q file golden files for tests that run EXPLAIN queries. 
+ return "DynamicValue(" + id + ")"; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDynamicValueDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDynamicValueDesc.java new file mode 100644 index 0000000..6aa41e2 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDynamicValueDesc.java @@ -0,0 +1,76 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.plan; + +import org.apache.hadoop.hive.ql.io.sarg.LiteralDelegate; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + +/** + * This expression represents a value that will be available at runtime. 
+ * + */ +public class ExprNodeDynamicValueDesc extends ExprNodeDesc { + + private static final long serialVersionUID = 1L; + + protected LiteralDelegate value; + + public ExprNodeDynamicValueDesc() { + } + + public ExprNodeDynamicValueDesc(TypeInfo typeInfo, LiteralDelegate value) { + super(typeInfo); + this.value = value; + } + + @Override + public ExprNodeDesc clone() { + return new ExprNodeDynamicValueDesc(typeInfo, value); + } + + @Override + public boolean isSame(Object o) { + if (o instanceof ExprNodeDynamicValueDesc) { + Object otherValue = ((ExprNodeDynamicValueDesc) o).getValue(); + if (value == null) { + return otherValue == null; + } + return value.equals(otherValue); + } + return false; + } + + public LiteralDelegate getValue() { + return value; + } + + public void setValue(LiteralDelegate value) { + this.value = value; + } + + @Override + public String getExprString() { + return value != null ? value.toString() : "null dynamic literal"; + } + + @Override + public String toString() { + return value != null ? 
value.toString() : "null dynamic literal"; + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java index 93b50a6..6563290 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java @@ -28,6 +28,7 @@ import java.util.List; import java.util.Set; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; import org.apache.hadoop.hive.ql.io.parquet.read.ParquetFilterPredicateConverter; import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue; @@ -44,6 +45,8 @@ */ public class TestConvertAstToSearchArg { + private final Configuration conf = new Configuration(); + private static void assertNoSharedNodes(ExpressionTree tree, Set seen ) throws Exception { @@ -547,7 +550,7 @@ public void testExpression1() throws Exception { " \n"; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(9, leaves.size()); @@ -836,7 +839,7 @@ public void testExpression2() throws Exception { " \n"; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(4, leaves.size()); @@ -1269,7 +1272,7 @@ public void testExpression3() throws Exception { " \n"; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(3, leaves.size()); @@ -1493,7 +1496,7 @@ id in (34,50) */ "\n"; SearchArgumentImpl sarg = - 
(SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(3, leaves.size()); @@ -1763,7 +1766,7 @@ public void testExpression5() throws Exception { " \n"; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(1, leaves.size()); @@ -2246,7 +2249,7 @@ public void testExpression7() throws Exception { ""; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(9, leaves.size()); @@ -2405,7 +2408,7 @@ public void testExpression8() throws Exception { " "; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(0, leaves.size()); @@ -2538,7 +2541,7 @@ public void testExpression9() throws Exception { " "; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(0, leaves.size()); @@ -2663,7 +2666,7 @@ public void testExpression10() throws Exception { ""; SearchArgumentImpl sarg = - (SearchArgumentImpl) ConvertAstToSearchArg.create(getFuncDesc(exprStr)); + (SearchArgumentImpl) ConvertAstToSearchArg.create(conf, getFuncDesc(exprStr)); List leaves = sarg.getLeaves(); assertEquals(1, leaves.size()); @@ -2712,7 +2715,7 @@ public void TestTimestampSarg() throws Exception { "AAABgj0BRVFVQcwBBW9yZy5hcGFjaGUuaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5Q" + "EAAAECAQFib29sZWHu"; 
SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2731,7 +2734,7 @@ public void TestDateSarg() throws Exception { "Y2hlLmhhZG9vcC5oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUH" + "MAQVvcmcuYXBhY2hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAgEBYm9vbGVh7g=="; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2751,7 +2754,7 @@ public void TestDecimalSarg() throws Exception { "oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQZvcmcuYXBhY2" + "hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABBAEBYm9vbGVh7g=="; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2771,7 +2774,7 @@ public void TestCharSarg() throws Exception { "vb3AuaGl2ZS5xbC51ZGYuZ2VuZXJpYy5HZW5lcmljVURGT1BFcXVh7AEAAAGCPQFFUVVBzAEGb3JnLm" + "FwYWNoZS5oYWRvb3AuaW8uQm9vbGVhbldyaXRhYmzlAQAAAQQBAWJvb2xlYe4="; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2791,7 +2794,7 @@ public void TestVarcharSarg() 
throws Exception { "lLmhhZG9vcC5oaXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQ" + "ZvcmcuYXBhY2hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABBAEBYm9vbGVh7g=="; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2810,7 +2813,7 @@ public void TestBigintSarg() throws Exception { "dmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewBAAABgj0BRVFVQcwBBW9yZy5hcGFjaGU" + "uaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5QEAAAECAQFib29sZWHu"; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2831,7 +2834,7 @@ public void TestBooleanSarg() throws Exception { "hlLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAwkBAgEBYrIAAAgBAwkBB29yZy5hcGFjaGUua" + "GFkb29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QQW7kAQEGAQAAAQMJ"; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("(and leaf-0 leaf-1)", sarg.getExpression().toString()); assertEquals(2, sarg.getLeaves().size()); @@ -2853,7 +2856,7 @@ public void TestFloatSarg() throws Exception { "aXZlLnFsLnVkZi5nZW5lcmljLkdlbmVyaWNVREZPUEVxdWHsAQAAAYI9AUVRVUHMAQVvcmcuYXBhY2h" + "lLmhhZG9vcC5pby5Cb29sZWFuV3JpdGFibOUBAAABAgEBYm9vbGVh7g=="; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, 
SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); @@ -2872,7 +2875,7 @@ public void TestDoubleSarg() throws Exception { "b29wLmhpdmUucWwudWRmLmdlbmVyaWMuR2VuZXJpY1VERk9QRXF1YewBAAABgj0BRVFVQcwBBW9yZy5" + "hcGFjaGUuaGFkb29wLmlvLkJvb2xlYW5Xcml0YWJs5QEAAAECAQFib29sZWHu"; SearchArgument sarg = - new ConvertAstToSearchArg(SerializationUtilities.deserializeExpression(serialAst)) + new ConvertAstToSearchArg(conf, SerializationUtilities.deserializeExpression(serialAst)) .buildSearchArgument(); assertEquals("leaf-0", sarg.getExpression().toString()); assertEquals(1, sarg.getLeaves().size()); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java index 8cbc26d..df42058 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestSearchArgumentImpl.java @@ -79,7 +79,7 @@ public static PredicateLeaf createPredicateLeaf(PredicateLeaf.Operator operator, Object literal, List literalList) { return new SearchArgumentImpl.PredicateLeafImpl(operator, type, columnName, - literal, literalList); + literal, literalList, null); } @Test diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/LiteralDelegate.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/LiteralDelegate.java new file mode 100644 index 0000000..6986588 --- /dev/null +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/LiteralDelegate.java @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.io.sarg; + +import org.apache.hadoop.conf.Configurable; + +/** + * Interface to retrieve a literal value + */ +public interface LiteralDelegate extends Configurable { + + Object getLiteral(); + +} diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentFactory.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentFactory.java index 8fda95c..3c10c83 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentFactory.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentFactory.java @@ -18,13 +18,20 @@ package org.apache.hadoop.hive.ql.io.sarg; +import org.apache.hadoop.conf.Configuration; + /** * A factory for creating SearchArguments, as well as modifying those created by this factory. 
*/ public class SearchArgumentFactory { public static SearchArgument.Builder newBuilder() { - return new SearchArgumentImpl.BuilderImpl(); + return newBuilder(null); + } + + public static SearchArgument.Builder newBuilder(Configuration conf) { + return new SearchArgumentImpl.BuilderImpl(conf); } + public static void setPredicateLeafColumn(PredicateLeaf leaf, String newName) { SearchArgumentImpl.PredicateLeafImpl.setColumnName(leaf, newName); } diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java index 10d8c51..c6e1ac3 100644 --- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java +++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java @@ -31,6 +31,8 @@ import java.util.Queue; import java.util.Set; +import org.apache.hadoop.conf.Configuration; + /** * The implementation of SearchArguments. Visible for testing only. */ @@ -57,27 +59,17 @@ public PredicateLeafImpl(Operator operator, Type type, String columnName, Object literal, - List literalList) { + List literalList, Configuration conf) { this.operator = operator; this.type = type; this.columnName = columnName; this.literal = literal; - if (literal != null) { - if (literal.getClass() != type.getValueClass()) { - throw new IllegalArgumentException("Wrong value class " + - literal.getClass().getName() + " for " + type + "." + operator + - " leaf"); - } - } + checkLiteralType(literal, type, conf); this.literalList = literalList; if (literalList != null) { Class valueCls = type.getValueClass(); for(Object lit: literalList) { - if (lit != null && lit.getClass() != valueCls) { - throw new IllegalArgumentException("Wrong value class item " + - lit.getClass().getName() + " for " + type + "." 
+ operator + - " leaf"); - } + checkLiteralType(lit, type, conf); } } } @@ -99,6 +91,10 @@ public String getColumnName() { @Override public Object getLiteral() { + if (literal instanceof LiteralDelegate) { + return ((LiteralDelegate) literal).getLiteral(); + } + // To get around a kryo 2.22 bug while deserialize a Timestamp into Date // (https://github.com/EsotericSoftware/kryo/issues/88) // When we see a Date, convert back into Timestamp @@ -169,6 +165,23 @@ public static void setColumnName(PredicateLeaf leaf, String newName) { assert leaf instanceof PredicateLeafImpl; ((PredicateLeafImpl)leaf).columnName = newName; } + + protected void checkLiteralType(Object literal, Type type, Configuration conf) { + if (literal == null) { + return; + } + + if (literal instanceof LiteralDelegate) { + // Give it a pass. Optionally, have LiteralDelegate provide a getLiteralClass() to check. + ((LiteralDelegate) literal).setConf(conf); + } else { + if (literal.getClass() != type.getValueClass()) { + throw new IllegalArgumentException("Wrong value class " + + literal.getClass().getName() + " for " + type + "." + operator + + " leaf"); + } + } + } } private final List leaves; @@ -218,6 +231,11 @@ public String toString() { static class BuilderImpl implements Builder { + Configuration conf; + public BuilderImpl(Configuration conf) { + this.conf = conf; + } + // max threshold for CNF conversion. 
having >8 elements in andList will be // converted to maybe private static final int CNF_COMBINATIONS_THRESHOLD = 256; @@ -291,7 +309,7 @@ public Builder lessThan(String column, PredicateLeaf.Type type, } else { PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.LESS_THAN, - type, column, literal, null); + type, column, literal, null, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return this; @@ -306,7 +324,7 @@ public Builder lessThanEquals(String column, PredicateLeaf.Type type, } else { PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.LESS_THAN_EQUALS, - type, column, literal, null); + type, column, literal, null, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return this; @@ -321,7 +339,7 @@ public Builder equals(String column, PredicateLeaf.Type type, } else { PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.EQUALS, - type, column, literal, null); + type, column, literal, null, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return this; @@ -336,7 +354,7 @@ public Builder nullSafeEquals(String column, PredicateLeaf.Type type, } else { PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.NULL_SAFE_EQUALS, - type, column, literal, null); + type, column, literal, null, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return this; @@ -358,7 +376,7 @@ public Builder in(String column, PredicateLeaf.Type type, PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.IN, - type, column, null, argList); + type, column, null, argList, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return this; @@ -372,7 +390,7 @@ public Builder isNull(String column, PredicateLeaf.Type type) { } else { PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.IS_NULL, - type, column, null, null); + type, column, null, null, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return 
this; @@ -390,7 +408,7 @@ public Builder between(String column, PredicateLeaf.Type type, Object lower, argList.add(upper); PredicateLeaf leaf = new PredicateLeafImpl(PredicateLeaf.Operator.BETWEEN, - type, column, null, argList); + type, column, null, argList, conf); parent.getChildren().add(new ExpressionTree(addLeaf(leaf))); } return this;