diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index acf2663..5e601c9 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -60,6 +60,8 @@
   protected static Driver driver;
   private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Table";
+  private static final String viewName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
+      + "View";
   private static final String inDbTableName = tableName + "_in_db";
   private static final String acidTableName = tableName + "_acid";
   private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
@@ -97,6 +99,7 @@ public static void beforeTest() throws Exception {
     driver = new Driver(conf);
     runCmd("create table " + tableName
         + " (i int, j int, k string) partitioned by (city string, `date` string) ");
+    runCmd("create view " + viewName + " as select * from " + tableName);
     runCmd("create database " + dbName);
     runCmd("create table " + dbName + "." + inDbTableName + "(i int)");
     // Need a separate table for ACID testing since it has to be bucketed and it has to be Acid
@@ -114,6 +117,7 @@ public static void afterTests() throws Exception {
     // Drop the tables when we're done. This makes the test work inside an IDE
     runCmd("drop table if exists " + acidTableName);
     runCmd("drop table if exists " + tableName);
+    runCmd("drop view if exists " + viewName);
     runCmd("drop table if exists " + dbName + "."
         + inDbTableName);
     runCmd("drop database if exists " + dbName);
     driver.close();
@@ -136,6 +140,46 @@ public void testInputSomeColumnsUsed() throws HiveAuthzPluginException, HiveAcce
         getSortedList(tableObj.getColumns()));
   }
 
+  @Test
+  public void testInputSomeColumnsUsedView() throws HiveAuthzPluginException, HiveAccessControlException,
+      CommandNeedRetryException {
+
+    reset(mockedAuthorizer);
+    int status = driver.compile("select i from " + viewName
+        + " where k = 'X' and city = 'Scottsdale-AZ' ");
+    assertEquals(0, status);
+
+    List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
+    checkSingleViewInput(inputs);
+    HivePrivilegeObject viewObj = inputs.get(0);
+    assertEquals("no of columns used", 3, viewObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("city", "i", "k"),
+        getSortedList(viewObj.getColumns()));
+  }
+
+  @Test
+  public void testInputSomeColumnsUsedJoin() throws HiveAuthzPluginException, HiveAccessControlException,
+      CommandNeedRetryException {
+
+    reset(mockedAuthorizer);
+    int status = driver.compile("select " + viewName + ".i, " + tableName + ".city from "
+        + viewName + " join " + tableName + " on " + viewName + ".city = " + tableName
+        + ".city where " + tableName + ".k = 'X'");
+    assertEquals(0, status);
+
+    List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
+    Collections.sort(inputs);
+    assertEquals(2, inputs.size());
+    HivePrivilegeObject tableObj = inputs.get(0);
+    assertEquals(tableName.toLowerCase(), tableObj.getObjectName().toLowerCase());
+    assertEquals("no of columns used", 2, tableObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("city", "k"), getSortedList(tableObj.getColumns()));
+    HivePrivilegeObject viewObj = inputs.get(1);
+    assertEquals(viewName.toLowerCase(), viewObj.getObjectName().toLowerCase());
+    assertEquals("no of columns used", 2, viewObj.getColumns().size());
+    assertEquals("Columns used", Arrays.asList("city", "i"), getSortedList(viewObj.getColumns()));
+  }
+
   private List<String> getSortedList(List<String> columns) {
     List<String> sortedCols = new ArrayList<String>(columns);
     Collections.sort(sortedCols);
@@ -355,6 +399,14 @@ private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
     assertTrue("table name", tableName.equalsIgnoreCase(tableObj.getObjectName()));
   }
 
+  private void checkSingleViewInput(List<HivePrivilegeObject> inputs) {
+    assertEquals("number of inputs", 1, inputs.size());
+
+    HivePrivilegeObject viewObj = inputs.get(0);
+    assertEquals("input type", HivePrivilegeObjectType.TABLE_OR_VIEW, viewObj.getType());
+    assertTrue("view name", viewName.equalsIgnoreCase(viewObj.getObjectName()));
+  }
+
   /**
    * @return pair with left value as inputs and right value as outputs,
    * passed in current call to authorizer.checkPrivileges
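Note for reviewers: the helper getHivePrivilegeObjectInputs() used by the new tests is defined elsewhere in this test class and is not part of the diff. As a rough, hedged sketch of the capture pattern it presumably relies on (the method name, raw-type captors, and matchers below are illustrative assumptions, not the actual helper):

    // Sketch only: capture the input/output HivePrivilegeObject lists passed
    // to the mocked authorizer's checkPrivileges() call via Mockito captors.
    @SuppressWarnings("unchecked")
    private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> capturePrivilegeObjects()
        throws HiveAuthzPluginException, HiveAccessControlException {
      ArgumentCaptor<List> inputsCapturer = ArgumentCaptor.forClass(List.class);
      ArgumentCaptor<List> outputsCapturer = ArgumentCaptor.forClass(List.class);
      verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
          inputsCapturer.capture(), outputsCapturer.capture(), any(HiveAuthzContext.class));
      return Pair.of((List<HivePrivilegeObject>) inputsCapturer.getValue(),
          (List<HivePrivilegeObject>) outputsCapturer.getValue());
    }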
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 65744ac..dceebe2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -743,7 +743,7 @@ public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
           Table tbl = read.getTable();
           if (tbl.isView() && sem instanceof SemanticAnalyzer) {
             tab2Cols.put(tbl,
-                sem.getColumnAccessInfo().getTableToColumnAccessMap().get(tbl.getTableName()));
+                sem.getColumnAccessInfo().getTableToColumnAccessMap().get(tbl.getCompleteName()));
           }
           if (read.getPartition() != null) {
             Partition partition = read.getPartition();
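The one-line change above is the heart of the fix, repeated in the two optimizer files below: ColumnAccessInfo is keyed by a plain string, and keying it by the unqualified table name collides as soon as two databases contain a view or table of the same name. A hedged illustration (assuming Table.getCompleteName() returns the db-qualified form, e.g. "db@table", as used elsewhere in this codebase; the view variables are hypothetical):

    // Illustration only, not part of the patch: two same-named views in
    // different databases must land under distinct ColumnAccessInfo keys.
    ColumnAccessInfo info = new ColumnAccessInfo();
    info.add(viewInDb1.getCompleteName(), "i");  // key e.g. "db1@myview"
    info.add(viewInDb2.getCompleteName(), "k");  // key e.g. "db2@myview"
    // With getTableName() both adds would have used the key "myview", and the
    // lookup in doAuthorization() above could fetch the wrong column set.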
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
index 7638ba0..a2a7f00 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
@@ -790,7 +790,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
           int index = originalOutputColumnNames.indexOf(col);
           Table tab = cppCtx.getParseContext().getViewProjectToTableSchema().get(op);
           cppCtx.getParseContext().getColumnAccessInfo()
-              .add(tab.getTableName(), tab.getCols().get(index).getName());
+              .add(tab.getCompleteName(), tab.getCols().get(index).getName());
         }
       }
       if (cols.size() < originalOutputColumnNames.size()) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveRelFieldTrimmer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveRelFieldTrimmer.java
index 03002cc..b0cb8df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveRelFieldTrimmer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveRelFieldTrimmer.java
@@ -387,7 +387,7 @@ public TrimResult trimFields(Project project, ImmutableBitSet fieldsUsed,
       if (this.columnAccessInfo != null && this.viewProjectToTableSchema != null
          && this.viewProjectToTableSchema.containsKey(project)) {
         Table tab = this.viewProjectToTableSchema.get(project);
-        this.columnAccessInfo.add(tab.getTableName(), tab.getCols().get(ord.i).getName());
+        this.columnAccessInfo.add(tab.getCompleteName(), tab.getCols().get(ord.i).getName());
       }
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
index dcc8daf..777734b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
@@ -34,21 +34,26 @@ public ColumnAccessAnalyzer(ParseContext pactx) {
     pGraphContext = pactx;
   }
 
-  public ColumnAccessInfo analyzeColumnAccess() throws SemanticException {
-    ColumnAccessInfo columnAccessInfo = new ColumnAccessInfo();
+  public ColumnAccessInfo analyzeColumnAccess(ColumnAccessInfo columnAccessInfo) throws SemanticException {
+    if (columnAccessInfo == null) {
+      columnAccessInfo = new ColumnAccessInfo();
+    }
     Collection<TableScanOperator> topOps = pGraphContext.getTopOps().values();
     for (TableScanOperator top : topOps) {
-      Table table = top.getConf().getTableMetadata();
-      String tableName = table.getCompleteName();
-      List<String> referenced = top.getReferencedColumns();
-      for (String column : referenced) {
-        columnAccessInfo.add(tableName, column);
-      }
-      if (table.isPartitioned()) {
-        PrunedPartitionList parts = pGraphContext.getPrunedPartitions(table.getTableName(), top);
-        if (parts.getReferredPartCols() != null) {
-          for (String partKey : parts.getReferredPartCols()) {
-            columnAccessInfo.add(tableName, partKey);
+      // If a table is scanned only inside a view, we do not need to authorize it separately.
+      if (!top.isInsideView()) {
+        Table table = top.getConf().getTableMetadata();
+        String tableName = table.getCompleteName();
+        List<String> referenced = top.getReferencedColumns();
+        for (String column : referenced) {
+          columnAccessInfo.add(tableName, column);
+        }
+        if (table.isPartitioned()) {
+          PrunedPartitionList parts = pGraphContext.getPrunedPartitions(table.getTableName(), top);
+          if (parts.getReferredPartCols() != null) {
+            for (String partKey : parts.getReferredPartCols()) {
+              columnAccessInfo.add(tableName, partKey);
+            }
           }
         }
       }
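With the signature change above, analyzeColumnAccess becomes a merge rather than a rebuild: column access recorded for views during planning survives, and table scans that sit inside a view are skipped via TableScanOperator.isInsideView(). The intended call pattern, mirroring the SemanticAnalyzer hunk below (a sketch, assuming the caller holds the partially filled ColumnAccessInfo):

    // Sketch: pass in the ColumnAccessInfo accumulated while expanding views
    // (may be null, in which case a fresh one is created inside the analyzer).
    ColumnAccessInfo viewColumnAccess = getColumnAccessInfo();
    ColumnAccessInfo merged =
        new ColumnAccessAnalyzer(pCtx).analyzeColumnAccess(viewColumnAccess);
    setColumnAccessInfo(merged);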
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 96df189..9d764db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -10732,7 +10732,8 @@ void analyzeInternal(ASTNode ast, PlannerContext plannerCtx) throws SemanticExce
     if (isColumnInfoNeedForAuth
         || HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS)) {
       ColumnAccessAnalyzer columnAccessAnalyzer = new ColumnAccessAnalyzer(pCtx);
-      setColumnAccessInfo(columnAccessAnalyzer.analyzeColumnAccess());
+      // View column access info collected during planning is carried in this.getColumnAccessInfo().
+      setColumnAccessInfo(columnAccessAnalyzer.analyzeColumnAccess(this.getColumnAccessInfo()));
     }
 
     // 9. Optimize Physical op tree & Translate to target execution engine (MR,
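End-to-end, if I read ColumnAccessInfo correctly, the map that Driver.doAuthorization() consumes is now keyed by complete names throughout. A hypothetical check of its shape after compiling the query from testInputSomeColumnsUsedView (the variable names and the "default" database are assumptions):

    // Hypothetical illustration: expected contents of the column access map
    // after "select i from <view> where k = 'X' and city = 'Scottsdale-AZ'".
    Map<String, List<String>> colAccess =
        sem.getColumnAccessInfo().getTableToColumnAccessMap();
    // Keyed by Table.getCompleteName(), e.g. "default@...view" as stored by
    // the metastore, mapping to the referenced view columns.
    List<String> viewCols = colAccess.get(viewTable.getCompleteName());
    assert viewCols.containsAll(Arrays.asList("city", "i", "k"));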