diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestTableLevelReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestTableLevelReplicationScenarios.java
index 4d47254e0c..b2297d2110 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestTableLevelReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestTableLevelReplicationScenarios.java
@@ -156,7 +156,7 @@ private String replicateAndVerify(String replPolicy, String oldReplPolicy, Strin
       replica.run("drop database if exists " + replicatedDbName + " cascade");
     }
 
-    WarehouseInstance.Tuple tuple = primary.dump(replPolicy, oldReplPolicy, dumpWithClause);
+    WarehouseInstance.Tuple tuple = primary.dump(replPolicy, dumpWithClause);
     DumpMetaData dumpMetaData = new DumpMetaData(new Path(tuple.dumpLocation, ReplUtils.REPL_HIVE_BASE_DIR), conf);
 
     Assert.assertEquals(oldReplPolicy != null && !replPolicy.equals(oldReplPolicy),
@@ -221,7 +221,7 @@ private String replicateAndVerifyClearDump(String replPolicy, String oldReplPoli
       replica.run("drop database if exists " + replicatedDbName + " cascade");
     }
 
-    WarehouseInstance.Tuple tuple = primary.dump(replPolicy, oldReplPolicy, dumpWithClause);
+    WarehouseInstance.Tuple tuple = primary.dump(replPolicy, dumpWithClause);
 
     if (bootstrappedTables != null) {
       verifyBootstrapDirInIncrementalDump(tuple.dumpLocation, bootstrappedTables);
@@ -708,7 +708,7 @@ public void testReplacePolicyOnBootstrapExternalTablesIncrementalPhase() throws
     dumpWithClause = Arrays.asList("'" + HiveConf.ConfVars.REPL_INCLUDE_EXTERNAL_TABLES.varname + "'='true'",
         "'" + HiveConf.ConfVars.REPL_BOOTSTRAP_EXTERNAL_TABLES.varname + "'='true'");
     WarehouseInstance.Tuple tuple = primary.run("use " + primaryDbName)
-        .dump(replPolicy, oldReplPolicy, dumpWithClause);
+        .dump(replPolicy, dumpWithClause);
 
     loadWithClause = ReplicationTestUtils.includeExternalTableClause(true);
     String hiveDumpDir = tuple.dumpLocation + File.separator + ReplUtils.REPL_HIVE_BASE_DIR;
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
index ed12478bdd..89e535d42a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
@@ -268,27 +268,16 @@ Tuple dump(String dbName)
     return dump(dbName, Collections.emptyList());
   }
 
-  Tuple dump(String dbName, List<String> withClauseOptions)
+  Tuple dump(String dumpExpression, List<String> withClauseOptions)
       throws Throwable {
     String dumpCommand =
-        "REPL DUMP " + dbName;
+        "REPL DUMP " + dumpExpression;
     if (withClauseOptions != null && !withClauseOptions.isEmpty()) {
       dumpCommand += " with (" + StringUtils.join(withClauseOptions, ",") + ")";
     }
     return dumpWithCommand(dumpCommand);
   }
 
-  Tuple dump(String replPolicy, String oldReplPolicy, List<String> withClauseOptions)
-      throws Throwable {
-    String dumpCommand =
-        "REPL DUMP " + replPolicy
"" : " REPLACE " + oldReplPolicy); - if (!withClauseOptions.isEmpty()) { - dumpCommand += " with (" + StringUtils.join(withClauseOptions, ",") + ")"; - } - return dumpWithCommand(dumpCommand); - } - Tuple dumpWithCommand(String dumpCommand) throws Throwable { advanceDumpDir(); run(dumpCommand); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java index 7e690fce35..e5d459240c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java @@ -163,6 +163,9 @@ public int execute() { Path previousValidHiveDumpPath = getPreviousValidDumpMetadataPath(dumpRoot); boolean isBootstrap = (previousValidHiveDumpPath == null); work.setBootstrap(isBootstrap); + if (previousValidHiveDumpPath != null) { + work.setOldReplScope(new DumpMetaData(previousValidHiveDumpPath, conf).getReplScope()); + } //If no previous dump is present or previous dump is already loaded, proceed with the dump operation. if (shouldDump(previousValidHiveDumpPath)) { Path currentDumpPath = getCurrentDumpPath(dumpRoot, isBootstrap); @@ -410,12 +413,16 @@ private boolean shouldDump(Path previousDumpPath) throws IOException { * @return true if need to examine tables for dump and false if not. */ private boolean shouldExamineTablesToDump() { - return (work.oldReplScope != null) + return (previousReplScopeModified()) || !tablesForBootstrap.isEmpty() || conf.getBoolVar(HiveConf.ConfVars.REPL_BOOTSTRAP_ACID_TABLES) || conf.getBoolVar(HiveConf.ConfVars.REPL_INCLUDE_EXTERNAL_TABLES); } + private boolean previousReplScopeModified() { + return work.oldReplScope != null && !work.oldReplScope.equals(work.replScope); + } + /** * Decide whether to dump external tables data. If external tables are enabled for replication, * then need to dump it's data in all the incremental dumps. @@ -762,7 +769,7 @@ private boolean needBootstrapAcidTablesDuringIncrementalDump() { // may not satisfying the old policy but satisfying the new policy. For filter, it may happen that the table // is renamed and started satisfying the policy. 
     return ((!work.replScope.includeAllTables())
-        || (work.oldReplScope != null)
+        || previousReplScopeModified()
         || conf.getBoolVar(HiveConf.ConfVars.REPL_BOOTSTRAP_ACID_TABLES));
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java
index aecfe757a4..47051e41ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java
@@ -24,7 +24,6 @@
 import org.apache.hadoop.hive.ql.exec.ReplCopyTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.exec.repl.util.FileList;
 import org.apache.hadoop.hive.ql.exec.repl.util.TaskTracker;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
@@ -47,7 +46,7 @@
   private static final long serialVersionUID = 1L;
   private static final Logger LOG = LoggerFactory.getLogger(ReplDumpWork.class);
   final ReplScope replScope;
-  final ReplScope oldReplScope;
+  ReplScope oldReplScope;
   final String dbNameOrPattern, astRepresentationForErrorMsg, resultTempPath;
   Long eventTo;
   Long eventFrom;
@@ -88,16 +87,19 @@ public static void testDeletePreviousDumpMetaPath(boolean failDeleteDumpMeta) {
     testDeletePreviousDumpMetaPath = failDeleteDumpMeta;
   }
 
-  public ReplDumpWork(ReplScope replScope, ReplScope oldReplScope,
+  public ReplDumpWork(ReplScope replScope,
       String astRepresentationForErrorMsg, String resultTempPath) {
     this.replScope = replScope;
-    this.oldReplScope = oldReplScope;
     this.dbNameOrPattern = replScope.getDbName();
     this.astRepresentationForErrorMsg = astRepresentationForErrorMsg;
     this.resultTempPath = resultTempPath;
   }
 
+  void setOldReplScope(ReplScope replScope) {
+    oldReplScope = replScope;
+  }
+
   int maxEventLimit() throws Exception {
     if (eventTo < eventFrom) {
       throw new Exception("Invalid event ID input received in TO clause");
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index ed408b0c1c..60ada73ae9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -65,7 +65,6 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   // Replication Scope
   private ReplScope replScope = new ReplScope();
-  private ReplScope oldReplScope = null;
 
   // Source DB Name for REPL LOAD
   private String sourceDbNameOrPattern;
@@ -143,34 +142,6 @@ private void setReplDumpTablesList(Tree replTablesNode, ReplScope replScope) thr
     }
   }
 
-  private void setOldReplPolicy(Tree oldReplPolicyTree) throws HiveException {
-    oldReplScope = new ReplScope();
-    int childCount = oldReplPolicyTree.getChildCount();
-
-    // First child is DB name and optional second child is tables list.
-    assert(childCount <= 2);
-
-    // First child is always the DB name. So set it.
-    oldReplScope.setDbName(oldReplPolicyTree.getChild(0).getText());
-    LOG.info("Old ReplScope: Set DB Name: {}", oldReplScope.getDbName());
-    if (!oldReplScope.getDbName().equalsIgnoreCase(replScope.getDbName())) {
-      LOG.error("DB name {} cannot be replaced to {} in the replication policy.",
-          oldReplScope.getDbName(), replScope.getDbName());
-      throw new SemanticException("DB name cannot be replaced in the replication policy.");
-    }
-
-    // If the old policy is just <dbName>, then tables list won't be there.
-    if (childCount <= 1) {
-      return;
-    }
-
-    // Traverse the children which can be either just include tables list or both include
-    // and exclude tables lists.
-    Tree oldPolicyTablesListNode = oldReplPolicyTree.getChild(1);
-    assert(oldPolicyTablesListNode.getType() == TOK_REPL_TABLES);
-    setReplDumpTablesList(oldPolicyTablesListNode, oldReplScope);
-  }
-
   private void initReplDump(ASTNode ast) throws HiveException {
     int numChildren = ast.getChildCount();
     boolean isMetaDataOnly = false;
@@ -196,9 +167,6 @@ private void initReplDump(ASTNode ast) throws HiveException {
       case TOK_REPL_TABLES:
         setReplDumpTablesList(currNode, replScope);
         break;
-      case TOK_REPLACE:
-        setOldReplPolicy(currNode);
-        break;
       default:
         throw new SemanticException("Unrecognized token " + currNode.getType() + " in REPL DUMP statement.");
     }
@@ -237,7 +205,6 @@ private void analyzeReplDump(ASTNode ast) throws SemanticException {
     Task<ReplDumpWork> replDumpWorkTask = TaskFactory
         .get(new ReplDumpWork(
             replScope,
-            oldReplScope,
             ASTErrorUtils.getMsg(ErrorMsg.INVALID_PATH.getMsg(), ast),
             ctx.getResFile().toUri().toString()
         ), conf);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
index 3f47678988..aede902d4a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
@@ -147,8 +147,7 @@ void dumpTable(String dbName, String tblName, String validTxnList,
     };
 
     task.initialize(queryState, null, null, null);
-    ReplDumpWork replDumpWork = new ReplDumpWork(replScope,
-        null, "", "");
+    ReplDumpWork replDumpWork = new ReplDumpWork(replScope, "", "");
     replDumpWork.setMetricCollector(metricCollector);
     task.setWork(replDumpWork);
 
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/repl/ReplScope.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/repl/ReplScope.java
index 8b26f1ce40..a9d65aec05 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/repl/ReplScope.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/repl/ReplScope.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.common.repl;
 
 import java.io.Serializable;
+import java.util.Objects;
 import java.util.regex.Pattern;
 
 /**
@@ -116,4 +117,27 @@ private boolean inTableExcludedList(final String tableName) {
     }
     return excludedTableNamePattern.matcher(tableName).matches();
   }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o == this) {
+      return true;
+    }
+    if (!(o instanceof ReplScope)) {
+      return false;
+    }
+    ReplScope other = (ReplScope) o;
+    // java.util.regex.Pattern does not override equals(), so compare the regex strings.
+    String thisDbPattern = (dbNamePattern == null) ? null : dbNamePattern.pattern();
+    String otherDbPattern = (other.dbNamePattern == null) ? null : other.dbNamePattern.pattern();
+    return Objects.equals(other.dbName, this.dbName)
+        && Objects.equals(otherDbPattern, thisDbPattern)
+        && Objects.equals(other.getIncludedTableNames(), this.includedTableNames)
+        && Objects.equals(other.getExcludedTableNames(), this.excludedTableNames);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(dbName, includedTableNames, excludedTableNames);
+  }
 }
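
Note for reviewers: with this patch REPL DUMP no longer accepts a REPLACE clause. The previous policy is instead recovered during the next dump from the prior dump's metadata (DumpMetaData.getReplScope(), stored on the work object via ReplDumpWork.setOldReplScope()), and a policy change is detected with the new ReplScope.equals(). Illustratively, assuming the documented table-level policy syntax, what previously required the REPLACE form:

    REPL DUMP db1.['t1+'].['t3'] REPLACE db1.['t1+']

now needs only the current policy, with the old one inferred from the last dump:

    REPL DUMP db1.['t1+'].['t3']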
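
To sanity-check the comparison semantics that previousReplScopeModified() relies on without a full Hive build, here is a minimal, self-contained sketch. SimpleScope is a hypothetical stand-in for org.apache.hadoop.hive.common.repl.ReplScope (not the real class), and previousScopeModified mirrors the new helper in ReplDumpTask:

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

public class ScopeEqualsDemo {

  // Hypothetical stand-in for ReplScope: db name plus optional include/exclude lists.
  static final class SimpleScope {
    final String dbName;
    final List<String> includedTables;   // null means the policy has no include list
    final List<String> excludedTables;   // null means the policy has no exclude list

    SimpleScope(String dbName, List<String> included, List<String> excluded) {
      this.dbName = dbName;
      this.includedTables = included;
      this.excludedTables = excluded;
    }

    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (!(o instanceof SimpleScope)) {
        return false;
      }
      SimpleScope other = (SimpleScope) o;
      // Objects.equals is null-safe: two absent table lists compare equal.
      return Objects.equals(dbName, other.dbName)
          && Objects.equals(includedTables, other.includedTables)
          && Objects.equals(excludedTables, other.excludedTables);
    }

    @Override
    public int hashCode() {
      return Objects.hash(dbName, includedTables, excludedTables);
    }
  }

  // Mirrors ReplDumpTask.previousReplScopeModified() from the patch.
  static boolean previousScopeModified(SimpleScope oldScope, SimpleScope current) {
    return oldScope != null && !oldScope.equals(current);
  }

  public static void main(String[] args) {
    SimpleScope current = new SimpleScope("db1", Arrays.asList("t1", "t2"), null);
    SimpleScope unchanged = new SimpleScope("db1", Arrays.asList("t1", "t2"), null);
    SimpleScope narrowed = new SimpleScope("db1", Arrays.asList("t1"), null);

    // Unchanged policy between dumps: no table re-examination is triggered.
    System.out.println(previousScopeModified(unchanged, current)); // false
    // Policy changed since the last dump: tables must be re-examined.
    System.out.println(previousScopeModified(narrowed, current));  // true
    // Bootstrap dump (no previous dump metadata): nothing to compare.
    System.out.println(previousScopeModified(null, current));      // false
  }
}

The behavioral point is the first case: shouldExamineTablesToDump() and needBootstrapAcidTablesDuringIncrementalDump() now fire only when the stored scope actually differs from the current one, whereas before this patch any dump that carried an old policy forced the re-examination path.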