diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java index 8e93932..6494ba0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java @@ -60,6 +60,7 @@ protected static Driver driver; private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName() + "Table"; + private static final String inDbTableName = tableName + "_in_db"; private static final String acidTableName = tableName + "_acid"; private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName() + "Db"; @@ -96,6 +97,7 @@ public static void beforeTest() throws Exception { runCmd("create table " + tableName + " (i int, j int, k string) partitioned by (city string, `date` string) "); runCmd("create database " + dbName); + runCmd("create table " + dbName + "." + inDbTableName + "(i int)"); // Need a separate table for ACID testing since it has to be bucketed and it has to be Acid runCmd("create table " + acidTableName + " (i int, j int, k int) clustered by (k) into 2 buckets " + "stored as orc TBLPROPERTIES ('transactional'='true')"); @@ -111,7 +113,8 @@ public static void afterTests() throws Exception { // Drop the tables when we're done. This makes the test work inside an IDE runCmd("drop table if exists " + acidTableName); runCmd("drop table if exists " + tableName); - runCmd("drop database if exists " + dbName); + runCmd("drop table if exists " + dbName + "." 
+ inDbTableName); + runCmd("drop database if exists " + dbName ); driver.close(); } @@ -314,6 +317,35 @@ public void testDelete() throws HiveAuthzPluginException, assertEquals("j", tableObj.getColumns().get(0)); } + @Test + public void testShowTables() throws HiveAuthzPluginException, + HiveAccessControlException, CommandNeedRetryException { + reset(mockedAuthorizer); + int status = driver.compile("show tables"); + assertEquals(0, status); + + Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs(); + List<HivePrivilegeObject> inputs = io.getLeft(); + assertEquals(1, inputs.size()); + HivePrivilegeObject dbObj = inputs.get(0); + assertEquals("default", dbObj.getDbname().toLowerCase()); + } + + @Test + public void testDescDatabase() throws HiveAuthzPluginException, + HiveAccessControlException, CommandNeedRetryException { + reset(mockedAuthorizer); + int status = driver.compile("describe database " + dbName); + assertEquals(0, status); + + Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs(); + List<HivePrivilegeObject> inputs = io.getLeft(); + assertEquals(1, inputs.size()); + HivePrivilegeObject dbObj = inputs.get(0); + assertEquals(dbName.toLowerCase(), dbObj.getDbname().toLowerCase()); + } + + private void checkSingleTableInput(List<HivePrivilegeObject> inputs) { assertEquals("number of inputs", 1, inputs.size()); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index bc69d23..24ca663 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -2068,6 +2068,7 @@ private void analyzeDescDatabase(ASTNode ast) throws SemanticException { DescDatabaseDesc descDbDesc = new DescDatabaseDesc(ctx.getResFile(), dbName, isExtended); + inputs.add(new ReadEntity(getDatabase(dbName))); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descDbDesc), conf)); setFetchTask(createFetchTask(descDbDesc.getSchema())); } @@ -2178,7 +2179,7 
@@ private void analyzeShowTables(ASTNode ast) throws SemanticException { showTblsDesc = new ShowTablesDesc(ctx.getResFile(), dbName); break; } - + inputs.add(new ReadEntity(getDatabase(dbName))); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showTblsDesc), conf)); setFetchTask(createFetchTask(showTblsDesc.getSchema())); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java index 7267756..31357c4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java @@ -152,6 +152,11 @@ private void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, HiveAuthzContext context) { + if (LOG.isDebugEnabled()) { + String msg = "Obtained following objects in filterListCmdObjects " + listObjs + " for user " + + authenticator.getUserName() + ". Context Info: " + context; + LOG.debug(msg); + } return listObjs; }