diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index c61d95b..c7bb957 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -638,6 +638,8 @@ // Serde for FetchTask HIVEFETCHOUTPUTSERDE("hive.fetch.output.serde", "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"), + HIVEEXPREVALUATIONCACHE("hive.cache.expr.evaluation", true), + // Hive Variables HIVEVARIABLESUBSTITUTE("hive.variable.substitute", true), HIVEVARIABLESUBSTITUTEDEPTH("hive.variable.substitute.depth", 40), diff --git conf/hive-default.xml.template conf/hive-default.xml.template index 3996d70..7b0bdef 100644 --- conf/hive-default.xml.template +++ conf/hive-default.xml.template @@ -1722,6 +1722,17 @@ + hive.cache.expr.evaluation + true + + If true, evaluation result of deterministic expression referenced twice or more will be cached. + For example, in filter condition like ".. where key + 10 > 10 or key + 10 = 0" + "key + 10" will be evaluated/cached once and reused for following expression ("key + 10 = 0"). + Currently, this is applied only to expressions in select or filter operator. + + + + hive.hmshandler.retry.attempts 1 The number of times to retry a HMSHandler call if there were a connection error diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java index 371f541..3f1e001 100755 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeColumnEvaluator.java @@ -29,9 +29,7 @@ /** * This evaluator gets the column from the row object. */ -public class ExprNodeColumnEvaluator extends ExprNodeEvaluator { - - protected ExprNodeColumnDesc expr; +public class ExprNodeColumnEvaluator extends ExprNodeEvaluator { transient boolean simpleCase; transient StructObjectInspector inspector; @@ -41,7 +39,7 @@ transient boolean[] unionField; public ExprNodeColumnEvaluator(ExprNodeColumnDesc expr) { - this.expr = expr; + super(expr); } @Override @@ -55,45 +53,44 @@ public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveExcep simpleCase = true; inspector = (StructObjectInspector) rowInspector; field = inspector.getStructFieldRef(names[0]); - return field.getFieldObjectInspector(); + return outputOI = field.getFieldObjectInspector(); } - else { - simpleCase = false; - inspectors = new StructObjectInspector[names.length]; - fields = new StructField[names.length]; - unionField = new boolean[names.length]; - int unionIndex = -1; - for (int i = 0; i < names.length; i++) { - if (i == 0) { - inspectors[0] = (StructObjectInspector) rowInspector; - } else { - if (unionIndex != -1) { - inspectors[i] = (StructObjectInspector) ( + simpleCase = false; + inspectors = new StructObjectInspector[names.length]; + fields = new StructField[names.length]; + unionField = new boolean[names.length]; + int unionIndex = -1; + + for (int i = 0; i < names.length; i++) { + if (i == 0) { + inspectors[0] = (StructObjectInspector) rowInspector; + } else { + if (unionIndex != -1) { + inspectors[i] = (StructObjectInspector) ( (UnionObjectInspector)fields[i-1].getFieldObjectInspector()). 
getObjectInspectors().get(unionIndex); - } else { - inspectors[i] = (StructObjectInspector) fields[i - 1] - .getFieldObjectInspector(); - } - } - // to support names like _colx:1._coly - unionfields = names[i].split("\\:"); - fields[i] = inspectors[i].getStructFieldRef(unionfields[0]); - if (unionfields.length > 1) { - unionIndex = Integer.parseInt(unionfields[1]); - unionField[i] = true; } else { - unionIndex = -1; - unionField[i] = false; + inspectors[i] = (StructObjectInspector) fields[i - 1] + .getFieldObjectInspector(); } } - return fields[names.length - 1].getFieldObjectInspector(); + // to support names like _colx:1._coly + unionfields = names[i].split("\\:"); + fields[i] = inspectors[i].getStructFieldRef(unionfields[0]); + if (unionfields.length > 1) { + unionIndex = Integer.parseInt(unionfields[1]); + unionField[i] = true; + } else { + unionIndex = -1; + unionField[i] = false; + } } + return outputOI = fields[names.length - 1].getFieldObjectInspector(); } @Override - public Object evaluate(Object row) throws HiveException { + protected Object _evaluate(Object row, int version) throws HiveException { if (simpleCase) { return inspector.getStructFieldData(row, field); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java index 8842f0c..4fe72a0 100755 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeConstantEvaluator.java @@ -27,13 +27,12 @@ * ExprNodeConstantEvaluator. * */ -public class ExprNodeConstantEvaluator extends ExprNodeEvaluator { +public class ExprNodeConstantEvaluator extends ExprNodeEvaluator { - protected ExprNodeConstantDesc expr; transient ConstantObjectInspector writableObjectInspector; public ExprNodeConstantEvaluator(ExprNodeConstantDesc expr) { - this.expr = expr; + super(expr); writableObjectInspector = expr.getWritableObjectInspector(); } @@ -43,7 +42,7 @@ public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveExcep } @Override - public Object evaluate(Object row) throws HiveException { + protected Object _evaluate(Object row, int version) throws HiveException { return writableObjectInspector.getWritableConstantValue(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java index 261546f..bd4c243 100755 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java @@ -19,13 +19,28 @@ package org.apache.hadoop.hive.ql.exec; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; /** * ExprNodeEvaluator. * */ -public abstract class ExprNodeEvaluator { +public abstract class ExprNodeEvaluator { + + protected final T expr; + protected ObjectInspector outputOI; + + public ExprNodeEvaluator(T expr) { + this.expr = expr; + } + + /** + * Return child evaluators if exist + */ + public T getExpr() { + return expr; + } /** * Initialize should be called once and only once. Return the ObjectInspector @@ -34,11 +49,40 @@ public abstract ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException; /** + * Return initialized ObjectInspector. 
If it's not initilized, throws runtime exception + */ + public ObjectInspector getOutputOI() { + if (outputOI == null) { + throw new IllegalStateException("Evaluator is not initialized"); + } + return outputOI; + } + + private transient int version = -1; + private transient Object evaluation; + + public Object evaluate(Object row) throws HiveException { + return evaluate(row, -1); + } + + /** * Evaluate the expression given the row. This method should use the * rowInspector passed in from initialize to inspect the row object. The * return value will be inspected by the return value of initialize. + * If this evaluator is referenced by others, store it for them */ - public abstract Object evaluate(Object row) throws HiveException; + protected Object evaluate(Object row, int version) throws HiveException { + if (version < 0 || version != this.version) { + this.version = version; + return evaluation = _evaluate(row, version); + } + return evaluation; + } + + /** + * Evaluate value + */ + protected abstract Object _evaluate(Object row, int version) throws HiveException; /** * Return whether this node (and all children nodes) are deterministic. @@ -46,5 +90,11 @@ public boolean isDeterministic() { return true; } - + + /** + * Return child evaluators if exist + */ + public ExprNodeEvaluator[] getChildren() { + return null; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java index 0e75c0f..7bf6a90 100755 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java @@ -18,6 +18,10 @@ package org.apache.hadoop.hive.ql.exec; +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; @@ -34,7 +38,7 @@ private ExprNodeEvaluatorFactory() { } - public static ExprNodeEvaluator get(ExprNodeDesc desc) { + public static ExprNodeEvaluator get(ExprNodeDesc desc) throws HiveException { // Constant node if (desc instanceof ExprNodeConstantDesc) { return new ExprNodeConstantEvaluator((ExprNodeConstantDesc) desc); @@ -59,4 +63,56 @@ public static ExprNodeEvaluator get(ExprNodeDesc desc) { throw new RuntimeException( "Cannot find ExprNodeEvaluator for the exprNodeDesc = " + desc); } + + /** + * Should be called before eval is initialized + */ + public static ExprNodeEvaluator toCachedEval(ExprNodeEvaluator eval) { + if (eval instanceof ExprNodeGenericFuncEvaluator) { + EvaluatorContext context = new EvaluatorContext(); + iterate(eval, context); + if (context.hasReference) { + return new ExprNodeEvaluatorHead(eval); + } + } + // has nothing to be cached + return eval; + } + + private static ExprNodeEvaluator iterate(ExprNodeEvaluator eval, EvaluatorContext context) { + if (!(eval instanceof ExprNodeConstantEvaluator) && eval.isDeterministic()) { + ExprNodeEvaluator replace = context.getEvaluated(eval); + if (replace != null) { + return replace; + } + } + ExprNodeEvaluator[] children = eval.getChildren(); + if (children != null && children.length > 0) { + for (int i = 0; i < children.length; i++) { + ExprNodeEvaluator replace = iterate(children[i], context); + if (replace != null) { + children[i] = replace; + } + } + } + return null; + } + + private static class EvaluatorContext { + + private final 
Map cached = new HashMap(); + + private boolean hasReference; + + public ExprNodeEvaluator getEvaluated(ExprNodeEvaluator eval) { + String key = eval.getExpr().getExprString(); + ExprNodeEvaluator prev = cached.get(key); + if (prev == null) { + cached.put(key, eval); + return null; + } + hasReference = true; + return new ExprNodeEvaluatorRef(prev); + } + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java new file mode 100644 index 0000000..42685fb --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorHead.java @@ -0,0 +1,50 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.exec; + +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +/** + * Increases version number of each evaluations for correct caching + */ +public class ExprNodeEvaluatorHead extends ExprNodeEvaluator { + + private int counter; + private final ExprNodeEvaluator referencing; + + public ExprNodeEvaluatorHead(ExprNodeEvaluator referencing) { + super(referencing.getExpr()); + this.referencing = referencing; + } + + @Override + public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException { + return outputOI = referencing.initialize(rowInspector); + } + + @Override + protected Object _evaluate(Object row, int version) throws HiveException { + return referencing.evaluate(row, next()); + } + + private int next() { + return ++counter < 0 ? counter = 0 : counter; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java new file mode 100644 index 0000000..0a6b66a --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorRef.java @@ -0,0 +1,46 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.exec; + +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; + +/** + * Returns evaluation result of other evaluator + */ +public class ExprNodeEvaluatorRef extends ExprNodeEvaluator { + + private int counter; + private final ExprNodeEvaluator referencing; + + public ExprNodeEvaluatorRef(ExprNodeEvaluator referencing) { + super(referencing.getExpr()); + this.referencing = referencing; + } + + @Override + public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException { + return outputOI = referencing.getOutputOI(); + } + + @Override + protected Object _evaluate(Object row, int version) throws HiveException { + return referencing.evaluate(row, version); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java index 052dbce..ff32626 100755 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeFieldEvaluator.java @@ -34,9 +34,8 @@ * is struct, then s.f is the field. If s is list of struct, then s.f is the * list of struct field. */ -public class ExprNodeFieldEvaluator extends ExprNodeEvaluator { +public class ExprNodeFieldEvaluator extends ExprNodeEvaluator { - protected ExprNodeFieldDesc desc; transient ExprNodeEvaluator leftEvaluator; transient ObjectInspector leftInspector; transient StructObjectInspector structObjectInspector; @@ -44,42 +43,41 @@ transient ObjectInspector structFieldObjectInspector; transient ObjectInspector resultObjectInspector; - public ExprNodeFieldEvaluator(ExprNodeFieldDesc desc) { - this.desc = desc; + public ExprNodeFieldEvaluator(ExprNodeFieldDesc desc) throws HiveException { + super(desc); leftEvaluator = ExprNodeEvaluatorFactory.get(desc.getDesc()); } @Override public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException { - leftInspector = leftEvaluator.initialize(rowInspector); - if (desc.getIsList()) { + if (expr.getIsList()) { structObjectInspector = (StructObjectInspector) ((ListObjectInspector) leftInspector) .getListElementObjectInspector(); } else { structObjectInspector = (StructObjectInspector) leftInspector; } - field = structObjectInspector.getStructFieldRef(desc.getFieldName()); + field = structObjectInspector.getStructFieldRef(expr.getFieldName()); structFieldObjectInspector = field.getFieldObjectInspector(); - if (desc.getIsList()) { + if (expr.getIsList()) { resultObjectInspector = ObjectInspectorFactory .getStandardListObjectInspector(structFieldObjectInspector); } else { resultObjectInspector = structFieldObjectInspector; } - return resultObjectInspector; + return outputOI = resultObjectInspector; } private List cachedList = new ArrayList(); @Override - public Object evaluate(Object row) throws HiveException { + protected Object _evaluate(Object row, int version) throws HiveException { // Get the result in leftInspectableObject - Object left = leftEvaluator.evaluate(row); + Object left = leftEvaluator.evaluate(row, version); - if (desc.getIsList()) { + if (expr.getIsList()) { List list = ((ListObjectInspector) leftInspector).getList(left); if (list == null) { return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java index f260b20..12d0aa6 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java @@ -35,16 +35,13 @@ * ExprNodeGenericFuncEvaluator. * */ -public class ExprNodeGenericFuncEvaluator extends ExprNodeEvaluator { +public class ExprNodeGenericFuncEvaluator extends ExprNodeEvaluator { private static final Log LOG = LogFactory .getLog(ExprNodeGenericFuncEvaluator.class.getName()); - protected ExprNodeGenericFuncDesc expr; - transient GenericUDF genericUDF; transient Object rowObject; - transient ObjectInspector outputOI; transient ExprNodeEvaluator[] children; transient GenericUDF.DeferredObject[] deferredChildren; transient boolean isEager; @@ -54,42 +51,38 @@ */ class DeferredExprObject implements GenericUDF.DeferredObject { - ExprNodeEvaluator eval; - - DeferredExprObject(ExprNodeEvaluator eval) { - this.eval = eval; - } + private final boolean eager; + private final ExprNodeEvaluator eval; - public Object get() throws HiveException { - return eval.evaluate(rowObject); - } - } + private transient boolean evaluated; + private transient int version; + private transient Object obj; - /** - * Class to force eager evaluation for GenericUDF in cases where - * it is warranted. - */ - class EagerExprObject implements GenericUDF.DeferredObject { - - ExprNodeEvaluator eval; - - transient Object obj; - - EagerExprObject(ExprNodeEvaluator eval) { + DeferredExprObject(ExprNodeEvaluator eval, boolean eager) { this.eval = eval; + this.eager = eager; } - void evaluate() throws HiveException { - obj = eval.evaluate(rowObject); + @Override + public void prepare(int version) throws HiveException { + this.version = version; + this.evaluated = false; + if (eager) { + get(); + } } public Object get() throws HiveException { + if (!evaluated) { + obj = eval.evaluate(rowObject, version); + evaluated = true; + } return obj; } } - public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) { - this.expr = expr; + public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) throws HiveException { + super(expr); children = new ExprNodeEvaluator[expr.getChildExprs().size()]; isEager = false; for (int i = 0; i < children.length; i++) { @@ -109,37 +102,29 @@ public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) { } } } - deferredChildren = - new GenericUDF.DeferredObject[expr.getChildExprs().size()]; - for (int i = 0; i < deferredChildren.length; i++) { - if (isEager) { - deferredChildren[i] = new EagerExprObject(children[i]); - } else { - deferredChildren[i] = new DeferredExprObject(children[i]); - } + genericUDF = expr.getGenericUDF(); + if (isEager && + (genericUDF instanceof GenericUDFCase || genericUDF instanceof GenericUDFWhen)) { + throw new HiveException("Stateful expressions cannot be used inside of CASE"); } } @Override public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException { + deferredChildren = new GenericUDF.DeferredObject[children.length]; + for (int i = 0; i < deferredChildren.length; i++) { + deferredChildren[i] = new DeferredExprObject(children[i], isEager); + } // Initialize all children first ObjectInspector[] childrenOIs = new ObjectInspector[children.length]; for (int i = 0; i < children.length; i++) { childrenOIs[i] = children[i].initialize(rowInspector); } - genericUDF = expr.getGenericUDF(); - if (isEager && - ((genericUDF instanceof GenericUDFCase) - || (genericUDF instanceof GenericUDFWhen))) { - throw new HiveException( - "Stateful expressions cannot be used inside of 
CASE"); - } MapredContext context = MapredContext.get(); if (context != null) { context.setup(genericUDF); } - this.outputOI = genericUDF.initializeAndFoldConstants(childrenOIs); - return this.outputOI; + return outputOI = genericUDF.initializeAndFoldConstants(childrenOIs); } @Override @@ -152,17 +137,20 @@ public boolean isDeterministic() { } @Override - public Object evaluate(Object row) throws HiveException { + public ExprNodeEvaluator[] getChildren() { + return children; + } + + @Override + protected Object _evaluate(Object row, int version) throws HiveException { rowObject = row; if (ObjectInspectorUtils.isConstantObjectInspector(outputOI) && isDeterministic()) { // The output of this UDF is constant, so don't even bother evaluating. return ((ConstantObjectInspector)outputOI).getWritableConstantValue(); } - if (isEager) { - for (int i = 0; i < deferredChildren.length; i++) { - ((EagerExprObject) deferredChildren[i]).evaluate(); - } + for (int i = 0; i < deferredChildren.length; i++) { + deferredChildren[i].prepare(version); } return genericUDF.evaluate(deferredChildren); } @@ -191,10 +179,8 @@ public Integer compare(Object row) throws HiveException { } rowObject = row; - if (isEager) { - for (int i = 0; i < deferredChildren.length; i++) { - ((EagerExprObject) deferredChildren[i]).evaluate(); - } + for (int i = 0; i < deferredChildren.length; i++) { + deferredChildren[i].prepare(-1); } return ((GenericUDFBaseCompare)genericUDF).compare(deferredChildren); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java index 61373f7..3aaf17c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeNullEvaluator.java @@ -29,22 +29,19 @@ * ExprNodeNullEvaluator. 
* */ -public class ExprNodeNullEvaluator extends ExprNodeEvaluator { - - protected ExprNodeNullDesc expr; +public class ExprNodeNullEvaluator extends ExprNodeEvaluator { public ExprNodeNullEvaluator(ExprNodeNullDesc expr) { - this.expr = expr; + super(expr); } @Override public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveException { - return PrimitiveObjectInspectorFactory.writableVoidObjectInspector; + return outputOI = PrimitiveObjectInspectorFactory.writableVoidObjectInspector; } @Override - public Object evaluate(Object row) throws HiveException { + protected Object _evaluate(Object row, int version) throws HiveException { return null; } - } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java index 917fa76..d2c981d 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java @@ -68,6 +68,10 @@ protected void initializeOp(Configuration hconf) throws HiveException { heartbeatInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVESENDHEARTBEAT); conditionEvaluator = ExprNodeEvaluatorFactory.get(conf.getPredicate()); + if (HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVEEXPREVALUATIONCACHE)) { + conditionEvaluator = ExprNodeEvaluatorFactory.toCachedEval(conditionEvaluator); + } + statsMap.put(Counter.FILTERED, filtered_count); statsMap.put(Counter.PASSED, passed_count); conditionInspector = null; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java index a8502e3..31dbf41 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java @@ -98,14 +98,14 @@ } public static int populateJoinKeyValue(List[] outMap, - Map> inputMap, int posBigTableAlias) { + Map> inputMap, int posBigTableAlias) throws HiveException { return populateJoinKeyValue(outMap, inputMap, null, posBigTableAlias); } public static int populateJoinKeyValue(List[] outMap, Map> inputMap, Byte[] order, - int posBigTableAlias) { + int posBigTableAlias) throws HiveException { int total = 0; for (Entry> e : inputMap.entrySet()) { Byte key = order == null ? 
e.getKey() : order[e.getKey()]; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java index 408fc29..d7f2b03 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java @@ -22,6 +22,7 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.SelectDesc; @@ -52,6 +53,9 @@ protected void initializeOp(Configuration hconf) throws HiveException { for (int i = 0; i < colList.size(); i++) { assert (colList.get(i) != null); eval[i] = ExprNodeEvaluatorFactory.get(colList.get(i)); + if (HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVEEXPREVALUATIONCACHE)) { + eval[i] = ExprNodeEvaluatorFactory.toCachedEval(eval[i]); + } } output = new Object[eval.length]; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java index c1bf8a5..3d1f55e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java @@ -50,6 +50,7 @@ * GenericUDF use DeferedObject to pass arguments. */ public static interface DeferredObject { + void prepare(int version) throws HiveException; Object get() throws HiveException; }; @@ -65,6 +66,10 @@ public DeferredJavaObject(Object value) { } @Override + public void prepare(int version) throws HiveException { + } + + @Override public Object get() throws HiveException { return value; }
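
A note on how the cache introduced above works. Every ExprNodeEvaluator now keeps its last result together with a row "version": evaluate(row) delegates to evaluate(row, -1), a negative version disables caching, and a non-negative version causes _evaluate() to run at most once per version. ExprNodeEvaluatorHead wraps the root expression and advances the version once per row, while ExprNodeEvaluatorRef points a repeated deterministic sub-expression at the evaluator of its first occurrence; the duplicates are detected in ExprNodeEvaluatorFactory.toCachedEval() by comparing ExprNodeDesc.getExprString(). The sketch below is a minimal standalone model of that contract, not Hive code; the class names Node, Head and SharedRef and their simplified signatures are made up for illustration.

  /** Minimal model of the per-row memoization added by this patch (illustrative names). */
  abstract class Node {
    private int version = -1;   // row version the cached value belongs to
    private Object cached;

    public Object evaluate(Object row) {
      return evaluate(row, -1);
    }

    /** A negative version disables caching; otherwise compute() runs at most once per version. */
    public Object evaluate(Object row, int version) {
      if (version < 0 || version != this.version) {
        this.version = version;
        cached = compute(row, version);
      }
      return cached;
    }

    protected abstract Object compute(Object row, int version);
  }

  /** Plays the role of ExprNodeEvaluatorHead: advances the version once per row. */
  class Head extends Node {
    private final Node root;
    private int counter;

    Head(Node root) {
      this.root = root;
    }

    @Override
    protected Object compute(Object row, int version) {
      return root.evaluate(row, next());
    }

    private int next() {
      return ++counter < 0 ? counter = 0 : counter;   // wrap around on overflow
    }
  }

  /** Plays the role of ExprNodeEvaluatorRef: reuses another node's cached value. */
  class SharedRef extends Node {
    private final Node target;

    SharedRef(Node target) {
      this.target = target;
    }

    @Override
    protected Object compute(Object row, int version) {
      return target.evaluate(row, version);   // hits the target's cache after the first call per row
    }
  }

As in the patch, only deterministic, non-constant evaluators are shared (toCachedEval() and iterate() skip everything else), and the overflow guard in next() mirrors ExprNodeEvaluatorHead.next().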
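
A worked example of the behaviour described for hive.cache.expr.evaluation: in a filter such as "key + 10 > 10 or key + 10 = 0", the shared "key + 10" should be computed once per row and reused by the second comparison. The demo below reuses the illustrative Node, Head and SharedRef classes from the sketch above and deliberately evaluates both OR branches for every row; it only models the expected call counts and is not Hive code.

  class CacheDemo {
    public static void main(String[] args) {
      final int[] calls = {0};

      // "key + 10", shared by both comparisons
      final Node keyPlusTen = new Node() {
        @Override
        protected Object compute(Object row, int version) {
          calls[0]++;
          return (Integer) row + 10;
        }
      };

      // "key + 10 > 10" evaluates the sub-expression directly
      final Node gtTen = new Node() {
        @Override
        protected Object compute(Object row, int version) {
          return (Integer) keyPlusTen.evaluate(row, version) > 10;
        }
      };

      // "key + 10 = 0" goes through a SharedRef, as toCachedEval() would arrange
      final Node ref = new SharedRef(keyPlusTen);
      final Node eqZero = new Node() {
        @Override
        protected Object compute(Object row, int version) {
          return (Integer) ref.evaluate(row, version) == 0;
        }
      };

      // non-short-circuiting OR so that both branches run for every row
      final Node or = new Node() {
        @Override
        protected Object compute(Object row, int version) {
          return (Boolean) gtTen.evaluate(row, version) | (Boolean) eqZero.evaluate(row, version);
        }
      };

      Node head = new Head(or);
      head.evaluate(5);     // row 1
      head.evaluate(-10);   // row 2
      // prints 2: one evaluation of "key + 10" per row instead of two
      System.out.println("key + 10 computed " + calls[0] + " times");
    }
  }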