diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java index 323b85a274..0aee0121a8 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java @@ -21,6 +21,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore; +import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.MetaStoreUtils; @@ -51,6 +53,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Nullable; import java.io.File; import java.io.FileWriter; import java.io.IOException; @@ -122,8 +125,9 @@ public static void setUpBeforeClass() throws Exception { hconf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); hconf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); hconf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); - hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, - "false"); + hconf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + hconf.set(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname, + "org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore"); System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " "); System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " "); @@ -356,6 +360,79 @@ public void testBootstrapLoadOnExistingDb() throws IOException { } @Test + public void testBootstrapWithConcurrentDropTable() throws IOException { + String name = testName.getMethodName(); + 
String dbName = createDB(name); + run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE"); + run("CREATE TABLE " + dbName + ".ptned(a string) partitioned by (b int) STORED AS TEXTFILE"); + + String[] unptn_data = new String[]{ "eleven" , "twelve" }; + String[] ptn_data_1 = new String[]{ "thirteen", "fourteen", "fifteen"}; + String[] ptn_data_2 = new String[]{ "fifteen", "sixteen", "seventeen"}; + String[] empty = new String[]{}; + + String unptn_locn = new Path(TEST_PATH, name + "_unptn").toUri().getPath(); + String ptn_locn_1 = new Path(TEST_PATH, name + "_ptn1").toUri().getPath(); + String ptn_locn_2 = new Path(TEST_PATH, name + "_ptn2").toUri().getPath(); + + createTestDataFile(unptn_locn, unptn_data); + createTestDataFile(ptn_locn_1, ptn_data_1); + createTestDataFile(ptn_locn_2, ptn_data_2); + + run("LOAD DATA LOCAL INPATH '" + unptn_locn + "' OVERWRITE INTO TABLE " + dbName + ".unptned"); + verifySetup("SELECT * from " + dbName + ".unptned", unptn_data); + run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b=1)"); + verifySetup("SELECT a from " + dbName + ".ptned WHERE b=1", ptn_data_1); + run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + dbName + ".ptned PARTITION(b=2)"); + verifySetup("SELECT a from " + dbName + ".ptned WHERE b=2", ptn_data_2); + + + advanceDumpDir(); + + BehaviourInjection ptnedTableNuller = new BehaviourInjection(){ + @Nullable + @Override + public Table apply(@Nullable Table table) { + if (table.getTableName().equalsIgnoreCase("ptned")){ + injectionPathCalled = true; + return null; + } else { + nonInjectedPathCalled = true; + return table; + } + } + }; + InjectableBehaviourObjectStore.setGetTableBehaviour(ptnedTableNuller); + + // The ptned table will not be dumped as getTable will return null + run("REPL DUMP " + dbName); + ptnedTableNuller.assertInjectionsPerformed(true,true); + InjectableBehaviourObjectStore.resetGetTableBehaviour(); // reset the 
behaviour + + String replDumpLocn = getResult(0, 0); + String replDumpId = getResult(0, 1, true); + LOG.info("Bootstrap-Dump: Dumped to {} with id {}", replDumpLocn, replDumpId); + run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'"); + + // The ptned table should miss in target as the table was virtually marked as dropped + verifyRun("SELECT * from " + dbName + "_dupe.unptned", unptn_data); + verifyFail("SELECT a from " + dbName + "_dupe.ptned WHERE b=1"); + + // Verify that Drop table on a non-existing table is idempotent + run("DROP TABLE " + dbName + ".ptned"); + + advanceDumpDir(); + run("REPL DUMP " + dbName + " FROM " + replDumpId); + String postDropReplDumpLocn = getResult(0,0); + String postDropReplDumpId = getResult(0,1,true); + LOG.info("Dumped to {} with id {}->{}", postDropReplDumpLocn, replDumpId, postDropReplDumpId); + assert(run("REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn + "'", true)); + + verifyRun("SELECT * from " + dbName + "_dupe.unptned", unptn_data); + verifyFail("SELECT a from " + dbName + "_dupe.ptned WHERE b=1"); + } + + @Test public void testIncrementalAdds() throws IOException { String name = testName.getMethodName(); String dbName = createDB(name); diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java new file mode 100644 index 0000000000..a832c78403 --- /dev/null +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/InjectableBehaviourObjectStore.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore; + +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Table; + +import static org.junit.Assert.assertEquals; + + +/** + * A wrapper around {@link ObjectStore} that allows us to inject custom behaviour + * on to some of the methods for testing. + */ +public class InjectableBehaviourObjectStore extends ObjectStore { + public InjectableBehaviourObjectStore() { + super(); + } + + /** + * A utility class that allows people injecting behaviour to determine if their injections occurred. + */ + public static abstract class BehaviourInjection + implements com.google.common.base.Function{ + protected boolean injectionPathCalled = false; + protected boolean nonInjectedPathCalled = false; + + public void assertInjectionsPerformed( + boolean expectedInjectionCalled, boolean expectedNonInjectedPathCalled){ + assertEquals(expectedInjectionCalled, injectionPathCalled); + assertEquals(expectedNonInjectedPathCalled, nonInjectedPathCalled); + } + }; + + private static com.google.common.base.Function getTableModifier = + com.google.common.base.Functions.identity(); + + public static void setGetTableBehaviour(com.google.common.base.Function modifier){ + getTableModifier = (modifier == null)? 
com.google.common.base.Functions.identity() : modifier; + } + + public static void resetGetTableBehaviour(){ + setGetTableBehaviour(null); + } + + public static com.google.common.base.Function getGetTableBehaviour() { + return getTableModifier; + } + + @Override + public Table getTable(String dbName, String tableName) throws MetaException { + return getTableModifier.apply(super.getTable(dbName, tableName)); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java index caa9e00d47..1e6b192beb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java @@ -43,6 +43,7 @@ import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.repl.DumpType; import org.apache.hadoop.hive.ql.parse.repl.dump.HiveWrapper; @@ -408,6 +409,11 @@ private Path dumpTbl(ASTNode ast, String dbName, String tblName, Path dbRoot) th rootTasks, inputs, outputs, LOG); REPL_STATE_LOG.info("Repl Dump: Analyzed dump for table/view: {}.{} and created copy tasks to dump metadata " + "and data to path {}", dbName, tblName, toURI.toString()); + } catch (InvalidTableException te) { + // Bootstrap dump shouldn't fail if the table is dropped/renamed while dumping it. + // Just log a debug message and skip it. + LOG.debug(te.getMessage()); + return null; } catch (HiveException e) { // TODO : simple wrap & rethrow for now, clean up with error codes throw new SemanticException(e);