diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index f4cdf02c97..862140f0ed 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -3162,6 +3162,10 @@ public void testDumpNonReplDatabase() throws IOException {
     String dbName = createDBNonRepl(testName.getMethodName(), driver);
     verifyFail("REPL DUMP " + dbName, driver);
     verifyFail("REPL DUMP " + dbName + " from 1 ", driver);
+    assertTrue(run("REPL DUMP " + dbName + " with ('hive.repl.dump.metadata.only' = 'true')",
+        true, driver));
+    assertTrue(run("REPL DUMP " + dbName + " from 1 with ('hive.repl.dump.metadata.only' = 'true')",
+        true, driver));
     run("alter database " + dbName + " set dbproperties ('repl.source.for' = '1, 2, 3')", driver);
     assertTrue(run("REPL DUMP " + dbName, true, driver));
     assertTrue(run("REPL DUMP " + dbName + " from 1 ", true, driver));
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 35437b14da..a35b5ccd18 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -399,6 +399,42 @@ public void testIncrementalMetadataReplication() throws Throwable {
   }
 
   @Test
+  public void testNonReplDBMetadataReplication() throws Throwable {
+    String dbName = primaryDbName + "_metadata";
+    WarehouseInstance.Tuple tuple = primary
+        .run("create database " + dbName)
+        .run("use " + dbName)
+        .run("create table table1 (i int, j int)")
+        .run("create table table2 (a int, city string) partitioned by (country string)")
+        .run("create table table3 (i int, j int)")
+        .run("insert into table1 values (1,2)")
+        .dump(dbName, null, Arrays.asList("'hive.repl.dump.metadata.only'='true'"));
+
+    replica.load(replicatedDbName, tuple.dumpLocation)
+        .run("use " + replicatedDbName)
+        .run("show tables")
+        .verifyResults(new String[]{"table1", "table2", "table3"})
+        .run("select * from table1")
+        .verifyResults(Collections.emptyList());
+
+    tuple = primary
+        .run("use " + dbName)
+        .run("alter table table1 rename to renamed_table1")
+        .run("insert into table2 partition(country='india') values (1,'mumbai') ")
+        .run("create table table4 (i int, j int)")
+        .dump(dbName, tuple.lastReplicationId, Arrays.asList("'hive.repl.dump.metadata.only'='true'"));
+
+    replica.load(replicatedDbName, tuple.dumpLocation)
+        .run("use " + replicatedDbName)
+        .run("show tables")
+        .verifyResults(new String[] { "renamed_table1", "table2", "table3", "table4" })
+        .run("select * from renamed_table1")
+        .verifyResults(Collections.emptyList())
+        .run("select * from table2")
+        .verifyResults(Collections.emptyList());
+  }
+
+  @Test
   public void testBootStrapDumpOfWarehouse() throws Throwable {
     String randomOne = RandomStringUtils.random(10, true, false);
     String randomTwo = RandomStringUtils.random(10, true, false);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index b913f697f7..c300dc529b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -136,20 +136,9 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
 
   private void initReplDump(ASTNode ast) throws HiveException {
     int numChildren = ast.getChildCount();
+    boolean isMetaDataOnly = false;
     dbNameOrPattern = PlanUtils.stripQuotes(ast.getChild(0).getText());
 
-    for (String dbName : Utils.matchesDb(db, dbNameOrPattern)) {
-      Database database = db.getDatabase(dbName);
-      if (database != null) {
-        if (!ReplChangeManager.isSourceOfReplication(database)) {
-          throw new SemanticException("Cannot dump database " + dbNameOrPattern +
-              " as it is not a source of replication");
-        }
-      } else {
-        throw new SemanticException("Cannot dump database " + dbNameOrPattern + " as it does not exist");
-      }
-    }
-
     // skip the first node, which is always required
     int currNode = 1;
     while (currNode < numChildren) {
@@ -159,6 +148,10 @@ private void initReplDump(ASTNode ast) throws HiveException {
         if (null != replConfigs) {
           for (Map.Entry<String, String> config : replConfigs.entrySet()) {
             conf.set(config.getKey(), config.getValue());
+            if ("hive.repl.dump.metadata.only".equalsIgnoreCase(config.getKey()) &&
+                "true".equalsIgnoreCase(config.getValue())) {
+              isMetaDataOnly = true;
+            }
           }
         }
       } else if (ast.getChild(currNode).getType() == TOK_TABNAME) {
@@ -189,6 +182,18 @@ private void initReplDump(ASTNode ast) throws HiveException {
       // move to the next root node
       currNode++;
     }
+
+    for (String dbName : Utils.matchesDb(db, dbNameOrPattern)) {
+      Database database = db.getDatabase(dbName);
+      if (database != null) {
+        if (!ReplChangeManager.isSourceOfReplication(database) && !isMetaDataOnly) {
+          throw new SemanticException("Cannot dump database " + dbName +
+              " as it is not a source of replication");
+        }
+      } else {
+        throw new SemanticException("Cannot dump database " + dbName + " as it does not exist");
+      }
+    }
   }
 
   // REPL DUMP
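
The behavior exercised by testDumpNonReplDatabase can be summarized as a minimal HiveQL sketch; the database name sales_db is illustrative, not from the patch:

-- A database without the 'repl.source.for' property still cannot be fully dumped:
REPL DUMP sales_db;                                                         -- fails
REPL DUMP sales_db FROM 1;                                                  -- fails

-- ...but metadata-only dumps of it are now accepted:
REPL DUMP sales_db WITH ('hive.repl.dump.metadata.only' = 'true');
REPL DUMP sales_db FROM 1 WITH ('hive.repl.dump.metadata.only' = 'true');

-- Marking the database as a replication source allows full dumps, as before:
ALTER DATABASE sales_db SET DBPROPERTIES ('repl.source.for' = '1, 2, 3');
REPL DUMP sales_db;                                                         -- succeeds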