diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index f973f8d..456e80d 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -50,6 +50,10 @@
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+import static org.mockito.Mockito.when;
 
 /**
  * Test HiveAuthorizer api invocation
@@ -390,6 +394,40 @@ public void testDescDatabase() throws HiveAuthzPluginException,
     assertEquals(dbName.toLowerCase(), dbObj.getDbname().toLowerCase());
   }
 
+  private void resetAuthorizer() throws HiveAuthzPluginException, HiveAccessControlException {
+    reset(mockedAuthorizer);
+    // HiveAuthorizer.filterListCmdObjects should not filter any object
+    when(mockedAuthorizer.filterListCmdObjects(any(List.class),
+        any(HiveAuthzContext.class))).thenAnswer(new Answer<List<HivePrivilegeObject>>() {
+      @Override
+      public List<HivePrivilegeObject> answer(InvocationOnMock invocation) throws Throwable {
+        Object[] args = invocation.getArguments();
+        return (List<HivePrivilegeObject>) args[0];
+      }
+    });
+  }
+
+  @Test
+  public void testReplDump() throws HiveAuthzPluginException, HiveAccessControlException,
+      CommandNeedRetryException {
+
+    resetAuthorizer();
+    int status = driver.compile("repl dump " + dbName);
+    assertEquals(0, status);
+    List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
+    HivePrivilegeObject dbObj = inputs.get(0);
+    assertEquals("input type", HivePrivilegeObjectType.DATABASE, dbObj.getType());
+    assertEquals("db name", dbName.toLowerCase(), dbObj.getDbname());
+
+    resetAuthorizer();
+    status = driver.compile("repl dump " + dbName + "." + inDbTableName);
+    assertEquals(0, status);
+    inputs = getHivePrivilegeObjectInputs().getLeft();
+    dbObj = inputs.get(0);
+    assertEquals("input type", HivePrivilegeObjectType.TABLE_OR_VIEW, dbObj.getType());
+    assertEquals("db name", dbName.toLowerCase(), dbObj.getDbname());
+    assertEquals("table name", inDbTableName.toLowerCase(), dbObj.getObjectName());
+  }
 
   private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
     assertEquals("number of inputs", 1, inputs.size());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
index f9bdff8..bcbce7d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java
@@ -176,7 +176,7 @@ private ReplicationSpec getNewEventOnlyReplicationSpec(Long eventId) throws Sema
   private Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot) throws Exception {
     // bootstrap case
     Long bootDumpBeginReplId = getHive().getMSC().getCurrentNotificationEventId().getEventId();
-    for (String dbName : matchesDb()) {
+    for (String dbName : matchesDb(getHive(), work.dbNameOrPattern)) {
       REPL_STATE_LOG
          .info("Repl Dump: Started analyzing Repl Dump for DB: {}, Dump Type: BOOTSTRAP",
              dbName);
@@ -184,7 +184,7 @@ private Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot) throws
 
       Path dbRoot = dumpDbMetadata(dbName, dumpRoot);
       dumpFunctionMetadata(dbName, dumpRoot);
-      for (String tblName : matchesTbl(dbName, work.tableNameOrPattern)) {
+      for (String tblName : matchesTbl(getHive(), dbName, work.tableNameOrPattern)) {
         LOG.debug(
             "analyzeReplDump dumping table: " + tblName + " to db root " + dbRoot.toUri());
         dumpTable(dbName, tblName, dbRoot);
@@ -223,17 +223,16 @@ private Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot) throws
     return bootDumpEndReplId;
   }
 
-  private Iterable<String> matchesDb() throws HiveException {
-    if (work.dbNameOrPattern == null) {
-      return getHive().getAllDatabases();
+  static public Iterable<String> matchesDb(Hive db, String dbPattern) throws HiveException {
+    if (dbPattern == null) {
+      return db.getAllDatabases();
     } else {
-      return getHive().getDatabasesByPattern(work.dbNameOrPattern);
+      return db.getDatabasesByPattern(dbPattern);
     }
   }
 
-  private Iterable<String> matchesTbl(String dbName, String tblPattern)
+  static public Iterable<String> matchesTbl(Hive db, String dbName, String tblPattern)
       throws HiveException {
-    Hive db = Hive.get();
     if (tblPattern == null) {
       return Collections2.filter(db.getAllTables(dbName),
           tableName -> {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index 48d9c94..1a426bf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -23,14 +23,17 @@ Licensed to the Apache Software Foundation (ASF) under one
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.repl.ReplDumpTask;
 import org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -174,6 +177,21 @@ private void analyzeReplDump(ASTNode ast) throws SemanticException {
           ctx.getResFile().toUri().toString()
       ), conf);
       rootTasks.add(replDumpWorkTask);
+      if (dbNameOrPattern != null) {
+        for (String dbName : ReplDumpTask.matchesDb(db, dbNameOrPattern)) {
+          if (tblNameOrPattern != null) {
+            for (String tblName : ReplDumpTask.matchesTbl(db, dbName, tblNameOrPattern)) {
+              inputs.add(new ReadEntity(db.getTable(dbName, tblName)));
+            }
+          } else {
+            inputs.add(new ReadEntity(db.getDatabase(dbName)));
+          }
+        }
+      }
+      if (!inputs.isEmpty()) {
+        // If we have something to export
+        outputs.add(toWriteEntity(new Path(conf.getVar(HiveConf.ConfVars.REPLDIR)), conf));
+      }
       setFetchTask(createFetchTask(dumpSchema));
     } catch (Exception e) {
       // TODO : simple wrap & rethrow for now, clean up with error codes
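
Side note on the stubbing pattern in resetAuthorizer() above: the hand-written Answer simply echoes back its first argument, so filterListCmdObjects filters nothing. Below is a minimal standalone sketch of the same idea, assuming a Mockito version that ships AdditionalAnswers.returnsFirstArg() (1.9.5 or later); the class name PassThroughFilterSketch is made up for illustration and is not part of the patch.

// Sketch only (not part of the patch): stub filterListCmdObjects so it
// filters nothing, using Mockito's built-in returnsFirstArg() instead of
// the hand-written Answer in resetAuthorizer().
import static org.mockito.AdditionalAnswers.returnsFirstArg;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;

public class PassThroughFilterSketch {
  public static void main(String[] args) throws Exception {
    HiveAuthorizer authorizer = mock(HiveAuthorizer.class);
    // Echo the object list (the first argument) back unchanged.
    when(authorizer.filterListCmdObjects(any(List.class), any(HiveAuthzContext.class)))
        .thenAnswer(returnsFirstArg());
  }
}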