diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index b3c6806217..aef1d4ab2a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -3322,6 +3322,7 @@ private boolean genSubQueryRelNode(QB qb, ASTNode node, RelNode srcRel, boolean
         }
         String sbQueryAlias = "sq_" + qb.incrNumSubQueryPredicates();
         QB qbSQ = new QB(qb.getId(), sbQueryAlias, true);
+        qbSQ.setInsideView(qb.isInsideView());
         Phase1Ctx ctx1 = initPhase1Ctx();
         doPhase1((ASTNode) next.getChild(1), qbSQ, ctx1, null);
         getMetaData(qbSQ);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 1a2777bf45..04e9552b8d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3381,6 +3381,7 @@ private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
         subQuery.validateAndRewriteAST(inputRR, forHavingClause, havingInputAlias, aliasToOpInfo.keySet());
 
         QB qbSQ = new QB(subQuery.getOuterQueryId(), subQuery.getAlias(), true);
+        qbSQ.setInsideView(qb.isInsideView());
         Operator sqPlanTopOp = genPlanForSubQueryPredicate(qbSQ, subQuery);
         aliasToOpInfo.put(subQuery.getAlias(), sqPlanTopOp);
         RowResolver sqRR = opParseCtx.get(sqPlanTopOp).getRowResolver();
diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index 6ad38b8467..6d86ca200d 100644
--- ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -138,6 +138,91 @@ public void testViewInSubQuery() throws Exception {
 
   }
 
+
+  /**
+   * Verify that the parent entities are captured correctly for view in subquery with WHERE
+   * subquery referencing a view. Optimizer: Cost-based
+   * @throws Exception
+   */
+  @Test
+  public void testViewInSubQueryWithWhereClauseCbo() throws Exception {
+    driver.getConf().setBoolVar(HiveConf.ConfVars.HIVE_CBO_ENABLED, true);
+    testViewInSubQueryWithWhereClause();
+  }
+
+  /**
+   * Verify that the parent entities are captured correctly for view in subquery with WHERE
+   * subquery referencing a view. Optimizer: Rule-based
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testViewInSubQueryWithWhereClauseRbo() throws Exception {
+    driver.getConf().setBoolVar(HiveConf.ConfVars.HIVE_CBO_ENABLED, false);
+    testViewInSubQueryWithWhereClause();
+  }
+
+  private void testViewInSubQueryWithWhereClause() {
+    String prefix = "tvsubquerywithwhereclause" + NAME_PREFIX;
+    final String tab1 = prefix + "t";
+    final String view1 = prefix + "v";
+    final String view2 = prefix + "v2";
+    final String tab1row1 = "'x','y','z'";
+    final String tab1row2 = "'a','b','c'";
+
+    //drop all if exists
+    int ret = driver.run("drop table if exists " + tab1).getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+    ret = driver.run("drop view if exists " + view1).getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+    ret = driver.run("drop view if exists " + view2).getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    //create tab1
+    ret = driver.run("create table " + tab1 + "(col1 string, col2 string, col3 string)")
+        .getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+    ret = driver.run("insert into " + tab1 + " values (" + tab1row1 + ")").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    //create view1
+    ret = driver.run("create view " + view1 + " as select "
+        + tab1 + ".col1, " + tab1 + ".col2, " + tab1 + ".col3 "
+        + " from " + tab1).getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    ret = driver.run("insert into " + tab1 + " values (" + tab1row2 + ")").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    //create view2
+    ret = driver.run(
+        "create view " + view2 + " as select "
+            + tab1 + ".col1, " + tab1 + ".col2, " + tab1 + ".col3 "
+            + " from " + tab1
+            + " where " + tab1 + ".col1 NOT IN ("
+            + "SELECT " + view1 + ".col1 FROM " + view1 + ")").getResponseCode();
+    assertEquals("Checking command success", 0, ret);
+
+    //select from view2
+    driver.compile("select * from " + view2);
+
+    //verify that only view2 is direct input in above query
+    ReadEntity[] readEntities = CheckInputReadEntity.readEntities;
+    for (ReadEntity readEntity : readEntities) {
+      String name = readEntity.getName();
+      if (name.equals("default@" + tab1)) {
+        assertFalse("Table should not be direct input", readEntity.isDirect());
+      } else if (name.equals("default@" + view1)) {
+        assertFalse("View1 should not be direct input", readEntity.isDirect());
+      } else if (name.equals("default@" + view2)) {
+        assertTrue("View2 should be direct input", readEntity.isDirect());
+      } else {
+        fail("Unrecognized ReadEntity input");
+      }
+    }
+  }
+
+
   /**
    * Verify that the the query with the subquery inside a view will have the correct
    * direct and indirect inputs.
diff --git ql/src/test/results/clientpositive/llap/explainuser_1.q.out ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index c86450aae2..6882963599 100644
--- ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -2180,7 +2180,7 @@ Stage-0
                 Filter Operator [FIL_16] (rows=166 width=178)
                   predicate:((value > 'val_9') and key is not null)
                   TableScan [TS_3] (rows=500 width=178)
-                    default@src_cbo,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                    default@src_cbo,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"],properties:{"insideView":"TRUE"}
 PREHOOK: query: explain
 select *
 from (select *
diff --git ql/src/test/results/clientpositive/masking_12.q.out ql/src/test/results/clientpositive/masking_12.q.out
index 9ecd981797..4beb206052 100644
--- ql/src/test/results/clientpositive/masking_12.q.out
+++ ql/src/test/results/clientpositive/masking_12.q.out
@@ -179,6 +179,8 @@ STAGE PLANS:
           TableScan
             alias: src
             filterExpr: key is not null (type: boolean)
+            properties:
+              insideView TRUE
             Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
diff --git ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
index 1f681944cd..3e395a84ec 100644
--- ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
+++ ql/src/test/results/clientpositive/spark/spark_explainuser_1.q.out
@@ -2139,7 +2139,7 @@ Stage-0
                 Filter Operator [FIL_14] (rows=166 width=178)
                   predicate:((value > 'val_9') and key is not null)
                   TableScan [TS_3] (rows=500 width=178)
-                    default@src_cbo,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"]
+                    default@src_cbo,a,Tbl:COMPLETE,Col:COMPLETE,Output:["key","value"],properties:{"insideView":"TRUE"}
 PREHOOK: query: explain
 select *
 from (select *
diff --git ql/src/test/results/clientpositive/spark/subquery_views.q.out ql/src/test/results/clientpositive/spark/subquery_views.q.out
index c221392264..30bcf4b43c 100644
--- ql/src/test/results/clientpositive/spark/subquery_views.q.out
+++ ql/src/test/results/clientpositive/spark/subquery_views.q.out
@@ -167,6 +167,8 @@ STAGE PLANS:
                   TableScan
                     alias: a
                     filterExpr: ((value > 'val_11') and (key < '11')) (type: boolean)
+                    properties:
+                      insideView TRUE
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: ((key < '11') and (value > 'val_11')) (type: boolean)
@@ -189,6 +191,8 @@ STAGE PLANS:
                   TableScan
                     alias: a
                     filterExpr: ((value > 'val_11') and (key < '11')) (type: boolean)
+                    properties:
+                      insideView TRUE
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Filter Operator
                       predicate: ((key < '11') and (value > 'val_11')) (type: boolean)