diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSQCountCheck.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSQCountCheck.java
index 89fa0de..f5d9f82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSQCountCheck.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSQCountCheck.java
@@ -61,11 +61,8 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    Object valObject = arguments[0].get();
-    assert(valObject != null);
 
     Long val = getLongValue(arguments, 0, converters);
-    assert(val >= 0);
 
     switch (arguments.length){
       case 1: //Scalar queries, should expect value/count less than 1
@@ -75,7 +72,13 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException {
         }
         break;
       case 2:
-        if (val == 0) { // IN/NOT IN subqueries with aggregate
+        Object valObject = arguments[0].get();
+        if (valObject != null
+            && getLongValue(arguments, 0, converters) == 0) {
+          throw new UDFArgumentException(
+              " IN/NOT IN subquery with aggregate returning zero result. Currently this is not supported.");
+        }
+        else if (valObject == null) {
           throw new UDFArgumentException(
               " IN/NOT IN subquery with aggregate returning zero result. Currently this is not supported.");
         }
diff --git a/ql/src/test/queries/clientnegative/subquery_corr_in_agg.q b/ql/src/test/queries/clientnegative/subquery_corr_in_agg.q
new file mode 100644
index 0000000..f677fba
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/subquery_corr_in_agg.q
@@ -0,0 +1,8 @@
+create table Part1 (PNum int, OrderOnHand int);
+insert into Part1 values (3,6),(10,1),(8,0);
+create table Supply (PNum int, Qty int);
+insert into Supply values (3,4),(3,2),(10,1);
+
+
+select pnum from Part1 p where OrderOnHand in
+    (select count(*) from Supply s where s.pnum = p.pnum);
diff --git a/ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out b/ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out
new file mode 100644
index 0000000..36019cb
--- /dev/null
+++ b/ql/src/test/results/clientnegative/subquery_corr_in_agg.q.out
@@ -0,0 +1,39 @@
+PREHOOK: query: create table Part1 (PNum int, OrderOnHand int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@Part1
+POSTHOOK: query: create table Part1 (PNum int, OrderOnHand int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@Part1
+PREHOOK: query: insert into Part1 values (3,6),(10,1),(8,0)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@part1
+POSTHOOK: query: insert into Part1 values (3,6),(10,1),(8,0)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@part1
+POSTHOOK: Lineage: part1.orderonhand EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+POSTHOOK: Lineage: part1.pnum EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+PREHOOK: query: create table Supply (PNum int, Qty int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@Supply
+POSTHOOK: query: create table Supply (PNum int, Qty int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@Supply
+PREHOOK: query: insert into Supply values (3,4),(3,2),(10,1)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@supply
+POSTHOOK: query: insert into Supply values (3,4),(3,2),(10,1)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@supply
+POSTHOOK: Lineage: supply.pnum EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: supply.qty EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: select pnum from Part1 p where OrderOnHand in
+    (select count(*) from Supply s where s.pnum = p.pnum)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part1
+PREHOOK: Input: default@supply
+#### A masked pattern was here ####
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask