diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index af9fba6f36..563da72c9c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -21,9 +21,18 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import java.util.Arrays;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.Collections;
 }
 @lexer::members {
+  public static final Set<String> orderedSetAggregateFunctions = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+      "percentile_cont",
+      "percentile_disc"
+  )));
+
   private Configuration hiveConf;
 
   public void setHiveConf(Configuration hiveConf) {
@@ -385,6 +394,7 @@ KW_SYNC: 'SYNC';
 KW_AST: 'AST';
 KW_COST: 'COST';
 KW_JOINCOST: 'JOINCOST';
+KW_WITHIN: 'WITHIN';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 1935d3f3fc..56585cf06b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -224,8 +224,10 @@ function
         (STAR) => (star=STAR)
         | (dist=KW_DISTINCT | KW_ALL)? (selectExpression (COMMA selectExpression)*)?
       )
-    RPAREN (KW_OVER ws=window_specification)?
+    RPAREN (KW_OVER ws=window_specification)? (within=KW_WITHIN KW_GROUP LPAREN KW_ORDER KW_BY colRef=columnRefOrder RPAREN)?
           -> {$star != null}? ^(TOK_FUNCTIONSTAR functionName $ws?)
+          -> {$within != null && HiveLexer.orderedSetAggregateFunctions.contains($functionName.text.toLowerCase())}?
+             ^(TOK_FUNCTION functionName {$colRef.tree.getChild(0).getChild(0)} (selectExpression+)? NumberLiteral[Integer.toString($colRef.tree.getType())])
           -> {$dist == null}? ^(TOK_FUNCTION functionName (selectExpression+)? $ws?)
           -> ^(TOK_FUNCTIONDI functionName (selectExpression+)? $ws?)
     ;
@@ -867,6 +869,7 @@ nonReserved
     | KW_RESOURCE | KW_PLAN | KW_PLANS | KW_QUERY_PARALLELISM | KW_ACTIVATE | KW_MOVE | KW_DO
     | KW_POOL | KW_ALLOC_FRACTION | KW_SCHEDULING_POLICY | KW_PATH | KW_MAPPING | KW_WORKLOAD | KW_MANAGEMENT | KW_ACTIVE | KW_UNMANAGED | KW_UNKNOWN
     | KW_WITHIN
+    | KW_WITHIN
     ;
 
 //The following SQL2011 reserved keywords are used as function name only, but not as identifiers.
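
The grammar change above implements WITHIN GROUP as a pure parse-time rewrite: for functions listed in HiveLexer.orderedSetAggregateFunctions, the ORDER BY column inside the clause is lifted into the regular argument list and the sort-direction token type is appended as a NumberLiteral, so semantic analysis sees the same TOK_FUNCTION shape as the legacy two-argument call plus one extra constant. A minimal sketch of how to observe the rewrite, reusing the ParseDriver helpers that the new test further below relies on (illustration only, not part of the patch):

    ParseDriver pd = new ParseDriver();
    ASTNode tree = pd.parseSelect(
        "SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val) FROM src", null);
    System.out.println(tree.dump());
    // Expected children of the TOK_FUNCTION node, per the rewrite rule:
    //   percentile_cont                      (functionName)
    //   TOK_TABLE_OR_COL -> val              (lifted out of columnRefOrder)
    //   0.4                                  (the original selectExpression)
    //   NumberLiteral holding HiveParser.TOK_TABSORTCOLNAMEASC (order direction)

Functions that are not in the ordered-set list fall through to the existing alternatives, so a WITHIN GROUP clause on, say, count() is parsed but silently dropped rather than rejected.
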
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java
index 72a19bd386..745d6d93b2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileCont.java
@@ -20,7 +20,6 @@
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -31,6 +30,7 @@
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -43,9 +43,11 @@
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.LongWritable;
 
 /**
@@ -70,14 +72,8 @@ public int compare(DoubleWritable o1, DoubleWritable o2) {
 
   @Override
   public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticException {
-    if (parameters.length != 2) {
-      throw new UDFArgumentTypeException(parameters.length - 1, "Exactly 2 argument is expected.");
-    }
+    validateParameterTypes(parameters);
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
-          + parameters[0].getTypeName() + " is passed.");
-    }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
     case SHORT:
@@ -101,6 +97,20 @@ public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters) throws SemanticE
     }
   }
 
+  protected void validateParameterTypes(TypeInfo[] parameters) throws UDFArgumentTypeException {
+    if (parameters.length < 2) {
+      throw new UDFArgumentTypeException(parameters.length - 1, "Not enough arguments.");
+    }
+    if (parameters.length > 3) {
+      throw new UDFArgumentTypeException(parameters.length - 1, "Too many arguments.");
+    }
+
+    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
+      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
+          + parameters[0].getTypeName() + " is passed.");
+    }
+  }
+
   /**
    * A comparator to sort the entries in order - Long.
    */
@@ -137,12 +147,17 @@ public int compare(Map.Entry<DoubleWritable, LongWritable> o1,
   public abstract static class PercentileContEvaluator<U> extends GenericUDAFEvaluator {
     PercentileCalculator<U> calc = getCalculator();
 
+    protected PercentileContEvaluator(Comparator<Map.Entry<U, LongWritable>> comparator) {
+      this.comparator = comparator;
+    }
+
     /**
      * A state class to store intermediate aggregation results.
      */
     public class PercentileAgg extends AbstractAggregationBuffer {
       Map<U, LongWritable> counts;
       List<DoubleWritable> percentiles;
+      boolean isAscending;
     }
 
     // For PARTIAL1 and COMPLETE
@@ -160,6 +175,10 @@ public int compare(Map.Entry<DoubleWritable, LongWritable> o1,
     protected transient StructObjectInspector soi;
     protected transient StructField countsField;
     protected transient StructField percentilesField;
+    protected transient StructField isAscendingField;
+
+    private final transient Comparator<Map.Entry<U, LongWritable>> comparator;
+    protected transient boolean isAscending;
 
     public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
       super.init(m, parameters);
@@ -167,13 +186,14 @@ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveExc
       initInspectors(parameters);
 
       if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {// ...for partial result
-        partialResult = new Object[2];
+        partialResult = new Object[3];
 
         ArrayList<ObjectInspector> foi = getPartialInspectors();
 
         ArrayList<String> fname = new ArrayList<String>();
         fname.add("counts");
         fname.add("percentiles");
+        fname.add("isAscending");
 
         return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
       } else { // ...for final result
@@ -192,16 +212,29 @@ public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveExc
 
     protected abstract U copyInput(U input);
 
-    protected abstract void sortEntries(List<Map.Entry<U, LongWritable>> entriesList);
+    private void sortEntries(List<Map.Entry<U, LongWritable>> entriesList, boolean isAscending) {
+      entriesList.sort(isAscending ? comparator : comparator.reversed());
+    }
 
     protected void initInspectors(ObjectInspector[] parameters) {
       if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {// ...for real input data
         inputOI = (PrimitiveObjectInspector) parameters[0];
+        if (parameters.length == 2) { // Order direction was not given, default to asc
+          isAscending = true;
+        }
+        else {
+          int orderDirectionToken = ((WritableConstantIntObjectInspector) parameters[2]).getWritableConstantValue().get();
+          if (orderDirectionToken != HiveParser.TOK_TABSORTCOLNAMEASC && orderDirectionToken != HiveParser.TOK_TABSORTCOLNAMEDESC) {
+            throw new RuntimeException("Invalid order direction token: " + orderDirectionToken);
+          }
+          isAscending = orderDirectionToken == HiveParser.TOK_TABSORTCOLNAMEASC;
+        }
       } else { // ...for partial result as input
         soi = (StructObjectInspector) parameters[0];
         countsField = soi.getStructFieldRef("counts");
         percentilesField = soi.getStructFieldRef("percentiles");
+        isAscendingField = soi.getStructFieldRef("isAscending");
 
         countsOI = (MapObjectInspector) countsField.getFieldObjectInspector();
         percentilesOI = (ListObjectInspector) percentilesField.getFieldObjectInspector();
 
@@ -211,6 +244,7 @@ protected void initInspectors(ObjectInspector[] parameters) {
 
     @Override
     public AggregationBuffer getNewAggregationBuffer() throws HiveException {
      PercentileAgg agg = new PercentileAgg();
+      agg.isAscending = isAscending;
       return agg;
     }
@@ -264,6 +298,7 @@ public void merge(AggregationBuffer agg, Object partial) throws HiveException {
 
       Object objCounts = soi.getStructFieldData(partial, countsField);
       Object objPercentiles = soi.getStructFieldData(partial, percentilesField);
+      Object objIsAscending = soi.getStructFieldData(partial, isAscendingField);
 
       Map<U, LongWritable> counts = (Map<U, LongWritable>) countsOI.getMap(objCounts);
       List<DoubleWritable> percentiles =
@@ -278,6 +313,7 @@ public void merge(AggregationBuffer agg, Object partial) throws HiveException {
       if (percAgg.percentiles == null) {
         percAgg.percentiles = new ArrayList<DoubleWritable>(percentiles);
       }
+      percAgg.isAscending = ((BooleanWritable)objIsAscending).get();
 
       for (Map.Entry<U, LongWritable> e : counts.entrySet()) {
         increment(percAgg, e.getKey(), e.getValue().get());
@@ -297,7 +333,7 @@ public Object terminate(AggregationBuffer agg) throws HiveException {
 
       Set<Map.Entry<U, LongWritable>> entries = percAgg.counts.entrySet();
       List<Map.Entry<U, LongWritable>> entriesList = new ArrayList<Map.Entry<U, LongWritable>>(entries);
-      sortEntries(entriesList);
+      sortEntries(entriesList, percAgg.isAscending);
 
       // Accumulate the counts.
       long total = getTotal(entriesList);
@@ -317,6 +353,7 @@ public Object terminatePartial(AggregationBuffer agg) throws HiveException {
       PercentileAgg percAgg = (PercentileAgg) agg;
       partialResult[0] = percAgg.counts;
       partialResult[1] = percAgg.percentiles;
+      partialResult[2] = new BooleanWritable(percAgg.isAscending);
 
       return partialResult;
     }
@@ -353,6 +390,10 @@ protected void calculatePercentile(PercentileAgg percAgg,
 
   public static class PercentileContLongEvaluator extends PercentileContEvaluator<LongWritable> {
 
+    public PercentileContLongEvaluator() {
+      super(new LongComparator());
+    }
+
     protected ArrayList<ObjectInspector> getPartialInspectors() {
       ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
 
@@ -361,6 +402,7 @@ protected void calculatePercentile(PercentileAgg percAgg,
           PrimitiveObjectInspectorFactory.writableLongObjectInspector));
       foi.add(ObjectInspectorFactory.getStandardListObjectInspector(
           PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
+      foi.add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
       return foi;
     }
 
@@ -376,10 +418,6 @@ protected LongWritable copyInput(LongWritable input) {
       return new LongWritable(input.get());
     }
 
-    protected void sortEntries(List<Map.Entry<LongWritable, LongWritable>> entriesList) {
-      Collections.sort(entriesList, new LongComparator());
-    }
-
     @Override
     protected PercentileCalculator<LongWritable> getCalculator() {
       return new PercentileContLongCalculator();
@@ -391,6 +429,10 @@ protected void sortEntries(List<Map.Entry<LongWritable, LongWritable>> entriesList)
    */
   public static class PercentileContDoubleEvaluator extends PercentileContEvaluator<DoubleWritable> {
+    public PercentileContDoubleEvaluator() {
+      super(new DoubleComparator());
+    }
+
     @Override
     protected ArrayList<ObjectInspector> getPartialInspectors() {
       ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
@@ -400,6 +442,7 @@ protected void sortEntries(List<Map.Entry<LongWritable, LongWritable>> entriesList)
           PrimitiveObjectInspectorFactory.writableLongObjectInspector));
       foi.add(ObjectInspectorFactory.getStandardListObjectInspector(
           PrimitiveObjectInspectorFactory.writableDoubleObjectInspector));
+      foi.add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
       return foi;
     }
 
@@ -417,10 +460,6 @@ protected DoubleWritable copyInput(DoubleWritable input) {
       return new DoubleWritable(input.get());
     }
 
-    protected void sortEntries(List<Map.Entry<DoubleWritable, LongWritable>> entriesList) {
-      Collections.sort(entriesList, new DoubleComparator());
-    }
-
     @Override
     protected PercentileCalculator<DoubleWritable> getCalculator() {
       return new PercentileContDoubleCalculator();
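
The evaluator changes above replace the per-subclass sortEntries overrides with a single comparator injected through the new protected constructor: ascending runs use it as-is, descending runs use Comparator.reversed(). The partial-result struct also grows a third field, isAscending, so the direction chosen on the map side survives into merge and terminate. A self-contained sketch of the idea with simplified types (illustration only, not patch code):

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;
    import java.util.Map;
    import java.util.TreeMap;

    public class SortDirectionSketch {
      public static void main(String[] args) {
        // Stand-in for PercentileAgg.counts: value -> occurrence count.
        Map<Long, Long> counts = new TreeMap<>();
        counts.put(3L, 1L);
        counts.put(8L, 2L);
        // One natural-order comparator per evaluator; descending is derived from it.
        Comparator<Map.Entry<Long, Long>> asc = Map.Entry.comparingByKey();
        boolean isAscending = false; // in Hive this comes from the constant direction token
        List<Map.Entry<Long, Long>> entries = new ArrayList<>(counts.entrySet());
        entries.sort(isAscending ? asc : asc.reversed());
        System.out.println(entries); // [8=2, 3=1]
      }
    }
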
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java
index 3ac336e68a..47fd428da6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFPercentileDisc.java
@@ -25,7 +25,6 @@
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.io.LongWritable;
@@ -39,14 +38,8 @@
   @Override
   public GenericUDAFEvaluator
       getEvaluator(TypeInfo[] parameters) throws SemanticException {
-    if (parameters.length != 2) {
-      throw new UDFArgumentTypeException(parameters.length - 1, "Exactly 2 argument is expected.");
-    }
+    validateParameterTypes(parameters);
 
-    if (parameters[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
-      throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
-          + parameters[0].getTypeName() + " is passed.");
-    }
     switch (((PrimitiveTypeInfo) parameters[0]).getPrimitiveCategory()) {
     case BYTE:
     case SHORT:
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
index f3372af5d6..77aa0d5ffa 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseDriver.java
@@ -303,5 +303,4 @@ public void testFromSubqueryIsSetop() throws Exception {
     System.out.println(root.dump());
   }
 
-
 }
\ No newline at end of file
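
Both percentile resolvers now share the protected validateParameterTypes helper defined in GenericUDAFPercentileCont; the call above resolves without a local definition because GenericUDAFPercentileDisc extends GenericUDAFPercentileCont. The relaxed arity check accepts the legacy two-argument shape as well as the three-argument shape produced by the WITHIN GROUP rewrite, where the third parameter is the constant sort-direction literal. A hypothetical JUnit-style fragment showing the accepted shapes (exceptions elided; illustration only, not part of the patch):

    TypeInfo[] legacy = { TypeInfoFactory.longTypeInfo, TypeInfoFactory.doubleTypeInfo };
    TypeInfo[] withinGroup = { TypeInfoFactory.longTypeInfo, TypeInfoFactory.doubleTypeInfo,
        TypeInfoFactory.intTypeInfo }; // trailing int: the direction token
    new GenericUDAFPercentileCont().getEvaluator(legacy);      // 2 args: percentile_cont(col, 0.5)
    new GenericUDAFPercentileCont().getEvaluator(withinGroup); // 3 args: WITHIN GROUP form
    // One argument fails with "Not enough arguments.", four with "Too many arguments."
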
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
new file mode 100644
index 0000000000..276307e40c
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestParseWithinGroupClause.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+
+public class TestParseWithinGroupClause {
+  ParseDriver parseDriver = new ParseDriver();
+
+  @Test
+  public void testParsePercentileCont() throws Exception {
+    ASTNode tree = parseDriver.parseSelect("SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val) FROM src", null);
+
+    assertEquals(1, tree.getChildCount());
+    ASTNode selExpr = (ASTNode) tree.getChild(0);
+    assertEquals(1, selExpr.getChildCount());
+    ASTNode function = (ASTNode) selExpr.getChild(0);
+    assertEquals(HiveParser.TOK_FUNCTION, function.getType());
+    assertEquals(4, function.getChildCount());
+
+    ASTNode functionName = (ASTNode) function.getChild(0);
+    assertEquals(HiveParser.Identifier, functionName.getType());
+    assertEquals("percentile_cont", functionName.getText());
+
+    ASTNode tableOrCol = (ASTNode) function.getChild(1);
+    assertEquals(HiveParser.TOK_TABLE_OR_COL, tableOrCol.getType());
+    ASTNode val = (ASTNode) tableOrCol.getChild(0);
+    assertEquals(HiveParser.Identifier, val.getType());
+    assertEquals("val", val.getText());
+
+    ASTNode fraction = (ASTNode) function.getChild(2);
+    assertEquals(HiveParser.Number, fraction.getType());
+    assertEquals("0.4", fraction.getText());
+
+    ASTNode orderDirection = (ASTNode) function.getChild(3);
+    assertEquals(HiveParser.NumberLiteral, orderDirection.getType());
+    assertEquals(Integer.toString(HiveParser.TOK_TABSORTCOLNAMEASC), orderDirection.getText());
+  }
+
+  @Test
+  public void testParsePercentileContAsc() throws Exception {
+    ASTNode tree = parseDriver.parseSelect("SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val ASC) FROM src", null);
+    ASTNode selExpr = (ASTNode) tree.getChild(0);
+    ASTNode function = (ASTNode) selExpr.getChild(0);
+    ASTNode orderDirection = (ASTNode) function.getChild(3);
+    assertEquals(Integer.toString(HiveParser.TOK_TABSORTCOLNAMEASC), orderDirection.getText());
+  }
+
+  @Test
+  public void testParsePercentileContDesc() throws Exception {
+    ASTNode tree = parseDriver.parseSelect("SELECT percentile_cont(0.4) WITHIN GROUP (ORDER BY val DESC) FROM src", null);
+    ASTNode selExpr = (ASTNode) tree.getChild(0);
+    ASTNode function = (ASTNode) selExpr.getChild(0);
+    ASTNode orderDirection = (ASTNode) function.getChild(3);
+    assertEquals(Integer.toString(HiveParser.TOK_TABSORTCOLNAMEDESC), orderDirection.getText());
+  }
+
+  @Test
+  public void testNonOrderedSetAggregateFunction() throws Exception {
+    ASTNode tree = parseDriver.parseSelect("SELECT count(1) WITHIN GROUP (ORDER BY val) FROM src", null);
+
+    assertEquals(1, tree.getChildCount());
+    ASTNode selExpr = (ASTNode) tree.getChild(0);
+    assertEquals(1, selExpr.getChildCount());
+    ASTNode function = (ASTNode) selExpr.getChild(0);
+    assertEquals(HiveParser.TOK_FUNCTION, function.getType());
+    assertEquals(2, function.getChildCount());
+
+    ASTNode functionName = (ASTNode) function.getChild(0);
+    assertEquals(HiveParser.Identifier, functionName.getType());
+    assertEquals("count", functionName.getText());
+
+    ASTNode funcParam = (ASTNode) function.getChild(1);
+    assertEquals(HiveParser.Number, funcParam.getType());
+    assertEquals("1", funcParam.getText());
+  }
+}
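
The qfile updates below check the new syntax end to end: each WITHIN GROUP call is asserted equal to its legacy two-argument counterpart, and both ASC and DESC are exercised. For orientation, percentile_cont interpolates linearly between the two ordered values straddling position p * (n - 1); a quick standalone check of the golden values that appear in the .q.out diffs further down (illustration only, not patch code):

    double[] asc = {3, 6, 7, 8, 8, 10, 13, 15, 16, 20}; // the t_test rows, ordered
    double pos = 0.2 * (asc.length - 1);                 // 1.8
    int lo = (int) pos;
    double contAsc = asc[lo] + (pos - lo) * (asc[lo + 1] - asc[lo]);
    // 6 + 0.8 * (7 - 6) = 6.8, matching the 6.800000000000001 golden value
    double[] desc = {20, 16, 15, 13, 10, 8, 8, 7, 6, 3};
    double contDesc = desc[lo] + (pos - lo) * (desc[lo + 1] - desc[lo]);
    // 16 + 0.8 * (15 - 16) = 15.2
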
diff --git ql/src/test/queries/clientpositive/udaf_percentile_cont.q ql/src/test/queries/clientpositive/udaf_percentile_cont.q
index 6d788c1fc9..02e39c3f31 100644
--- ql/src/test/queries/clientpositive/udaf_percentile_cont.q
+++ ql/src/test/queries/clientpositive/udaf_percentile_cont.q
@@ -67,3 +67,19 @@ FROM src
 GROUP BY
 CAST(key AS INT) DIV 10;
 select percentile_cont(cast(key as bigint), 0.5) from src where false;
+
+
+CREATE TABLE t_test (value int);
+INSERT INTO t_test VALUES (3), (8), (13), (6), (20), (10), (7), (15), (16), (8);
+
+SELECT
+percentile_cont(value, 0.0),
+percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value) = percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value ASC),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value ASC) = percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value DESC)
+FROM t_test;
+
+DROP TABLE t_test;
diff --git ql/src/test/queries/clientpositive/udaf_percentile_disc.q ql/src/test/queries/clientpositive/udaf_percentile_disc.q
index 7ba703eecc..c3f2b00e5b 100644
--- ql/src/test/queries/clientpositive/udaf_percentile_disc.q
+++ ql/src/test/queries/clientpositive/udaf_percentile_disc.q
@@ -67,3 +67,19 @@ FROM src
 GROUP BY
 CAST(key AS INT) DIV 10;
 select percentile_disc(cast(key as bigint), 0.5) from src where false;
+
+
+CREATE TABLE t_test (value int);
+INSERT INTO t_test VALUES (3), (8), (13), (6), (20), (10), (7), (15), (16), (8);
+
+SELECT
+  percentile_disc(value, 0.0),
+  percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value) = percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value ASC),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value ASC) = percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value DESC)
+FROM t_test;
+
+DROP TABLE t_test;
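
percentile_disc, in contrast, always returns one of the input values. The golden results below (7.0 ascending, 15.0 descending at p = 0.2, and 3.0 at p = 0.0) are consistent with picking the value at rank ceil(1 + p * (n - 1)) in the requested order; note this is an inference from the expected outputs, not a quote of the implementation:

    double[] asc = {3, 6, 7, 8, 8, 10, 13, 15, 16, 20};
    int rank = (int) Math.ceil(1 + 0.2 * (asc.length - 1)); // ceil(2.8) = 3
    double discAsc = asc[rank - 1];                          // 7.0
    double discDesc = asc[asc.length - rank];                // 15.0
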
diff --git ql/src/test/results/clientpositive/udaf_percentile_cont.q.out ql/src/test/results/clientpositive/udaf_percentile_cont.q.out
index dda6ce55c2..234faee7fe 100644
--- ql/src/test/results/clientpositive/udaf_percentile_cont.q.out
+++ ql/src/test/results/clientpositive/udaf_percentile_cont.q.out
@@ -419,3 +419,53 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 NULL
+PREHOOK: query: CREATE TABLE t_test (value int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t_test
+POSTHOOK: query: CREATE TABLE t_test (value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t_test
+PREHOOK: query: INSERT INTO t_test VALUES (3), (8), (13), (6), (20), (10), (7), (15), (16), (8)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@t_test
+POSTHOOK: query: INSERT INTO t_test VALUES (3), (8), (13), (6), (20), (10), (7), (15), (16), (8)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@t_test
+POSTHOOK: Lineage: t_test.value SCRIPT []
+PREHOOK: query: SELECT
+percentile_cont(value, 0.0),
+percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value) = percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value ASC),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value ASC) = percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value DESC)
+FROM t_test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+percentile_cont(value, 0.0),
+percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value) = percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value ASC),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value ASC) = percentile_cont(value, 0.2),
+percentile_cont(0.2) WITHIN GROUP (ORDER BY value DESC)
+FROM t_test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_test
+#### A masked pattern was here ####
+3.0	6.800000000000001	6.800000000000001	true	6.800000000000001	true	15.2
+PREHOOK: query: DROP TABLE t_test
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_test
+PREHOOK: Output: default@t_test
+POSTHOOK: query: DROP TABLE t_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_test
+POSTHOOK: Output: default@t_test
diff --git ql/src/test/results/clientpositive/udaf_percentile_disc.q.out ql/src/test/results/clientpositive/udaf_percentile_disc.q.out
index 75fa36cc36..8b5de837dd 100644
--- ql/src/test/results/clientpositive/udaf_percentile_disc.q.out
+++ ql/src/test/results/clientpositive/udaf_percentile_disc.q.out
@@ -419,3 +419,53 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 NULL
+PREHOOK: query: CREATE TABLE t_test (value int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t_test
+POSTHOOK: query: CREATE TABLE t_test (value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t_test
+PREHOOK: query: INSERT INTO t_test VALUES (3), (8), (13), (6), (20), (10), (7), (15), (16), (8)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@t_test
+POSTHOOK: query: INSERT INTO t_test VALUES (3), (8), (13), (6), (20), (10), (7), (15), (16), (8)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@t_test
+POSTHOOK: Lineage: t_test.value SCRIPT []
+PREHOOK: query: SELECT
+  percentile_disc(value, 0.0),
+  percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value) = percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value ASC),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value ASC) = percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value DESC)
+FROM t_test
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_test
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+  percentile_disc(value, 0.0),
+  percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value) = percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value ASC),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value ASC) = percentile_disc(value, 0.2),
+  percentile_disc(0.2) WITHIN GROUP (ORDER BY value DESC)
+FROM t_test
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_test
+#### A masked pattern was here ####
+3.0	7.0	7.0	true	7.0	true	15.0
+PREHOOK: query: DROP TABLE t_test
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_test
+PREHOOK: Output: default@t_test
+POSTHOOK: query: DROP TABLE t_test
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_test
+POSTHOOK: Output: default@t_test