diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
index c965ce6..7806466 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleAvg.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
@@ -32,6 +33,8 @@
  * more efficient.
  *
  */
+@Description(name = "example_avg",
+    value = "_FUNC_(col) - Example UDAF to compute average")
 public final class UDAFExampleAvg extends UDAF {
 
   /**
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
index e2680ac..cdcec43 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
@@ -35,6 +36,8 @@
  * implement built-in aggregation functions, which are harder to program but
  * more efficient.
  */
+@Description(name = "example_group_concat",
+    value = "_FUNC_(col) - Example UDAF that concatenates all arguments from different rows into a single string")
 public class UDAFExampleGroupConcat extends UDAF {
 
   /**
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
index 7bc19d9..ccb36d0 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
@@ -19,11 +19,13 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 
 /**
  * Returns the max N double values.
  */
+@Description(name = "example_max_n", value = "_FUNC_(expr) - Example UDAF that returns the max N double values")
 public class UDAFExampleMaxN extends UDAF {
 
   /**
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
index 56ba3b6..947167a 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
@@ -19,11 +19,13 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 
 /**
  * Returns the min N double values.
  */
+@Description(name = "example_min_n", value = "_FUNC_(expr) - Example UDAF that returns the min N double values")
 public class UDAFExampleMinN extends UDAF{
 
   /**
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
index 719c3e1..18b1df6 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hive.contrib.udf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleAdd.
  *
  */
+@Description(name = "example_add", value = "_FUNC_(expr) - Example UDF that returns the sum")
 public class UDFExampleAdd extends UDF {
 
   public Integer evaluate(Integer... a) {
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
index 879e77e..9a1a382 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
@@ -19,12 +19,14 @@
 
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleArraySum.
  *
  */
+@Description(name = "example_arraysum", value = "_FUNC_(expr) - Example UDF that returns the sum")
 public class UDFExampleArraySum extends UDF {
 
   public Double evaluate(List<Double> a) {
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
index a92ad70..bc54e3c 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hive.contrib.udf.example;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleFormat.
  *
  */
+@Description(name = "example_format", value = "_FUNC_(expr) - Example UDF that returns a formatted String")
 public class UDFExampleFormat extends UDF {
 
   public String evaluate(String format, Object... args) {
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
index a13c05a..6b7360f 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
@@ -21,12 +21,15 @@
 import java.util.Collections;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleMapConcat.
  *
  */
+@Description(name = "example_mapconcat",
+    value = "_FUNC_(expr) - Example UDF that returns the contents of a Map as a formatted String")
 public class UDFExampleMapConcat extends UDF {
 
   public String evaluate(Map<String, String> a) {
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
index 299b3f0..a4fc796 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
@@ -19,12 +19,15 @@
 
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleStructPrint.
  *
  */
+@Description(name = "example_structprint",
+    value = "_FUNC_(obj) - Example UDF that returns the contents of an object")
 public class UDFExampleStructPrint extends UDF {
 
   public String evaluate(Object a) {
diff --git a/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java b/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
index 032322a..8094946 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
@@ -20,6 +20,7 @@
 
 import java.util.ArrayList;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
@@ -34,6 +35,8 @@
  * to test outputting of rows on close with lateral view.
  *
  */
+@Description(name = "udtfCount2",
+    value = "_FUNC_(col) - UDTF that outputs the number of rows seen, twice.")
 public class GenericUDTFCount2 extends GenericUDTF {
 
   private transient Integer count = Integer.valueOf(0);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
index 7b7fd71..f9842fa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -32,6 +33,8 @@
  * UDAFTestMax.
  *
  */
+@Description(name = "test_max",
+    value = "_FUNC_(col) - UDAF to report the max value")
 public class UDAFTestMax extends UDAF {
 
   /**
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
index 61c7e0c..f804764 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
@@ -27,6 +27,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -34,6 +35,8 @@
 /**
  * A UDF for testing, which does key/value lookup from a file
  */
+@Description(name = "lookup",
+    value = "_FUNC_(col) - UDF for key/value lookup from a file")
 public class UDFFileLookup extends UDF {
 
   static Log LOG = LogFactory.getLog(UDFFileLookup.class);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
index 66a30ab..382fa44 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which throws RuntimeException if the length of a string.
  */
+@Description(name = "test_error",
+    value = "_FUNC_(col) - UDF throws RuntimeException if expression evaluates to false")
 public class UDFTestErrorOnFalse extends UDF {
 
   public int evaluate(Boolean b) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
index 9e75c51..da3ea38 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -25,6 +26,8 @@
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
+@Description(name = "testlength",
+    value = "_FUNC_(col) - UDF evaluates the length of the string")
 public class UDFTestLength extends UDF {
 
   IntWritable result = new IntWritable();
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
index b1aab45..ac083f8 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which evaluates the length of a string. This UDF uses Java
  * Primitive classes for parameters.
  */
+@Description(name = "testlength2",
+    value = "_FUNC_(col) - UDF evaluates the length of the string and returns value as Java Integer")
 public class UDFTestLength2 extends UDF {
 
   public Integer evaluate(String s) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
index d3b525e..5fa63f1 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -26,7 +27,8 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Reporter;
-
+@Description(name = "counter",
+    value = "_FUNC_(col) - UDF to report MR counter values")
 public class DummyContextUDF extends GenericUDF {
 
   private MapredContext context;
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
index 4ec7431..bf6c7c2 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@
 /**
  * A test GenericUDF to return native Java's boolean type
  */
+@Description(name = "test_udf_get_java_boolean",
+    value = "_FUNC_(str) - GenericUDF to return native Java's boolean type")
 public class GenericUDFTestGetJavaBoolean extends GenericUDF {
 
   ObjectInspector[] argumentOIs;
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
index ead45ae..914cebf 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@
 /**
  * A test GenericUDF to return native Java's string type
  */
+@Description(name = "test_udf_get_java_string",
+    value = "_FUNC_(str) - GenericUDF to return native Java's string type")
 public class GenericUDFTestGetJavaString extends GenericUDF {
 
   ObjectInspector[] argumentOIs;
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
index dedf91d..0ab8d0b 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
@@ -21,6 +21,7 @@
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -34,6 +35,8 @@
 /**
  * Mimics oracle's function translate(str1, str2, str3).
  */
+@Description(name = "test_translate",
+    value = "_FUNC_(str1, str2, str3) - Mimics oracle's function translate(str1, str2, str3)")
 public class GenericUDFTestTranslate extends GenericUDF {
 
   private transient ObjectInspector[] argumentOIs;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
index 4e8f6af..949cdc0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryProperties.java
@@ -50,6 +50,7 @@
 
   private int noOfJoins = 0;
   private int noOfOuterJoins = 0;
+  private boolean hasLateralViews;
 
   private boolean multiDestQuery;
   private boolean filterWithSubQuery;
@@ -72,6 +73,14 @@
   public int getOuterJoinCount() {
     return noOfOuterJoins;
   }
 
+  public void setHasLateralViews(boolean hasLateralViews) {
+    this.hasLateralViews = hasLateralViews;
+  }
+
+  public boolean hasLateralViews() {
+    return hasLateralViews;
+  }
+
   public boolean hasGroupBy() {
     return hasGroupBy;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java
index 34e958c..83242b3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/stats/FilterSelectivityEstimator.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.hadoop.hive.ql.optimizer.optiq.stats;
 
 import java.util.BitSet;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
index 0426809..9771b97 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java
@@ -183,6 +183,11 @@ private static String getName(GenericUDF hiveUDF) {
     if (udfAnnotation != null && udfAnnotation instanceof Description) {
       Description udfDescription = (Description) udfAnnotation;
       udfName = udfDescription.name();
+      if (udfName != null) {
+        String[] aliases = udfName.split(",");
+        if (aliases.length > 0)
+          udfName = aliases[0];
+      }
     }
 
     if (udfName == null || udfName.isEmpty()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 12eb95b..216c69b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -1110,6 +1110,7 @@ public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1)
         processSubQuery(qb, frm);
       } else if (frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
           frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) {
+        queryProperties.setHasLateralViews(true);
         processLateralView(qb, frm);
       } else if (isJoinToken(frm)) {
         processJoin(qb, frm);
@@ -9577,7 +9578,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
         || ast.getToken().getType() == HiveParser.TOK_EXPLAIN;
     if (!tokenTypeIsQuery || createVwDesc != null
         || !HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)
-        || !canHandleQuery()) {
+        || !canHandleQuery(qb)) {
       runCBO = false;
     }
 
@@ -11847,17 +11848,16 @@ else return (ltd.getReplace() ? WriteEntity.WriteType.INSERT_OVERWRITE :
   /*
    * Entry point to Optimizations using Optiq.
    */
-
-  // TODO: Extend QP to indicate LV, Multi Insert, Cubes, Rollups...
-  private boolean canHandleQuery() {
+  private boolean canHandleQuery(QB qbToChk) {
     boolean runOptiqPlanner = false;
     // Assumption: If top level QB is query then everything below it must also
     // be Query
-    if (qb.getIsQuery()
+    if (qbToChk.getIsQuery()
         && ((queryProperties.getJoinCount() > 1) || conf.getBoolVar(ConfVars.HIVE_IN_TEST))
         && !queryProperties.hasClusterBy() && !queryProperties.hasDistributeBy()
         && !queryProperties.hasSortBy() && !queryProperties.hasPTF()
-        && !queryProperties.usesScript() && !queryProperties.hasMultiDestQuery()) {
+        && !queryProperties.usesScript() && !queryProperties.hasMultiDestQuery()
+        && !queryProperties.hasLateralViews()) {
       runOptiqPlanner = true;
     } else {
       LOG.info("Can not invoke CBO; query contains operators not supported for CBO.");
@@ -12259,7 +12259,10 @@ private RelNode genJoinLogicalPlan(ASTNode joinParseTree, Map a
     JoinType hiveJoinType = null;
 
     if (joinParseTree.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) {
-      throw new RuntimeException("CBO does not support Unique Join");
+      String msg = String.format("UNIQUE JOIN is currently not supported in CBO,"
+          + " turn off cbo to use UNIQUE JOIN.");
+      LOG.debug(msg);
+      throw new OptiqSemanticException(msg);
     }
 
     // 1. Determine Join Type
@@ -13357,9 +13360,8 @@ private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel) throws SemanticExcep
           toString(
             selExprList.getChild(0).getTokenStartIndex(),
             selExprList.getChild(0).getTokenStopIndex());
-      String msg = String.format("Hint specified for %s." +
-          " Currently we don't support hints in CBO," +
-          " turn off cbo to use hints.", hint);
+      String msg = String.format("Hint specified for %s."
+          + " Currently we don't support hints in CBO, turn off cbo to use hints.", hint);
       LOG.debug(msg);
       throw new OptiqSemanticException(msg);
     }
@@ -13370,7 +13372,10 @@ private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel) throws SemanticExcep
     // 4. Bailout if select involves Transform
     boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() == HiveParser.TOK_TRANSFORM);
     if (isInTransform) {
-      throw new RuntimeException("SELECT TRANSFORM not supported");
+      String msg = String.format("SELECT TRANSFORM is currently not supported in CBO,"
+          + " turn off cbo to use TRANSFORM.");
+      LOG.debug(msg);
+      throw new OptiqSemanticException(msg);
     }
 
     // 5. Bailout if select involves UDTF
@@ -13384,7 +13389,10 @@ private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel) throws SemanticExcep
       genericUDTF = fi.getGenericUDTF();
     }
     if (genericUDTF != null) {
-      throw new RuntimeException("SELECT UDTF not supported");
+      String msg = String.format("UDTF " + funcName + " is currently not supported in CBO,"
+          + " turn off cbo to use UDTF " + funcName);
+      LOG.debug(msg);
+      throw new OptiqSemanticException(msg);
     }
   }
 
@@ -13521,6 +13529,14 @@ private RelNode genLogicalPlan(QB qb) throws SemanticException {
 
     // First generate all the opInfos for the elements in the from clause
     Map<String, RelNode> aliasToRel = new HashMap<String, RelNode>();
 
+    // 0. Check if we can handle the query
+    // This check is needed here because of SubQuery
+    if (!canHandleQuery(qb)) {
+      String msg = String.format("CBO Can not handle Sub Query");
+      LOG.debug(msg);
+      throw new OptiqSemanticException(msg);
+    }
+
     // 1. Build Rel For Src (SubQuery, TS, Join)
     // 1.1. Recurse over the subqueries to fill the subquery part of the plan
     for (String subqAlias : qb.getSubqAliases()) {
@@ -13634,10 +13650,14 @@ private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel, Map
-    if (qbp.getClauseNames().size() > 1)
-      throw new RuntimeException("Multi Insert is not supported");
+    if (qbp.getClauseNames().size() > 1) {
+      String msg = String.format("Multi Insert is currently not supported in CBO,"
+          + " turn off cbo to use Multi Insert.");
+      LOG.debug(msg);
+      throw new OptiqSemanticException(msg);
+    }
 
     return qbp;
   }
diff --git a/ql/src/test/queries/clientpositive/cbo_correctness.q b/ql/src/test/queries/clientpositive/cbo_correctness.q
index ddba40a..f7f0722 100644
--- a/ql/src/test/queries/clientpositive/cbo_correctness.q
+++ b/ql/src/test/queries/clientpositive/cbo_correctness.q
@@ -332,6 +332,7 @@ having b.p_mfgr not in
   from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a
   where min(p_retailprice) = l and r - l > 600
   )
+order by b.p_mfgr
 ;
 
 -- agg, non corr, having
@@ -344,6 +345,7 @@ having b.p_mfgr not in
   group by p_mfgr
   having max(p_retailprice) - min(p_retailprice) > 600
   )
+order by b.p_mfgr
 ;
 
 -- 17. SubQueries In
diff --git a/ql/src/test/results/clientpositive/cbo_correctness.q.out b/ql/src/test/results/clientpositive/cbo_correctness.q.out
index 00f05f2..3335d4d 100644
--- a/ql/src/test/results/clientpositive/cbo_correctness.q.out
+++ b/ql/src/test/results/clientpositive/cbo_correctness.q.out
@@ -18449,6 +18449,7 @@ having b.p_mfgr not in
   from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a
   where min(p_retailprice) = l and r - l > 600
   )
+order by b.p_mfgr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@part
 #### A masked pattern was here ####
@@ -18463,6 +18464,7 @@ having b.p_mfgr not in
   from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a
   where min(p_retailprice) = l and r - l > 600
   )
+order by b.p_mfgr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@part
 #### A masked pattern was here ####
@@ -18478,6 +18480,7 @@ having b.p_mfgr not in
   group by p_mfgr
   having max(p_retailprice) - min(p_retailprice) > 600
   )
+order by b.p_mfgr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@part
 #### A masked pattern was here ####
@@ -18491,11 +18494,12 @@ having b.p_mfgr not in
   group by p_mfgr
   having max(p_retailprice) - min(p_retailprice) > 600
   )
+order by b.p_mfgr
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@part
 #### A masked pattern was here ####
-Manufacturer#2	1690.68
 Manufacturer#1	1173.15
+Manufacturer#2	1690.68
 PREHOOK: query: -- 17. SubQueries In
 -- non agg, non corr
 select *