diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java index 67b74c2c26..53d13d8c99 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java @@ -30,9 +30,12 @@ import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; import java.io.IOException; import java.util.HashMap; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; public class TestExportImport { @@ -122,4 +125,43 @@ public void databaseTheTableIsImportedIntoShouldBeParsedFromCommandLine() throws .verifyResults(new String[] { "1", "2" }); } + + @Test + public void testExportNonNativeTable() throws Throwable { + String path = "hdfs:///tmp/" + dbName + "/"; + String exportPath = path + "1/"; + String exportMetaPath = exportPath + "/Meta"; + String tableName = testName.getMethodName(); + String createTableQuery = + "CREATE TABLE " + tableName + " ( serde_id bigint COMMENT 'from deserializer', name string " + + "COMMENT 'from deserializer', slib string COMMENT 'from deserializer') " + + "ROW FORMAT SERDE 'org.apache.hive.storage.jdbc.JdbcSerDe' " + + "STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler' " + + "WITH SERDEPROPERTIES ('serialization.format'='1') " + + "TBLPROPERTIES ( " + + "'hive.sql.database.type'='METASTORE', " + + "'hive.sql.query'='SELECT \"SERDE_ID\", \"NAME\", \"SLIB\" FROM \"SERDES\"')"; + + srcHiveWarehouse.run("use " + dbName) + .run(createTableQuery) + .runFailure("export table " + tableName + " to '" + exportPath + "'") + .run("export table " + tableName + " to '" + exportMetaPath + "'" + " for metadata replication('1')"); + + destHiveWarehouse.run("use " + replDbName) + 
.runFailure("import table " + tableName + " from '" + exportPath + "'") + .run("show tables") + .verifyFailure(new String[] {tableName}) + .run("import table " + tableName + " from '" + exportMetaPath + "'") + .run("show tables") + .verifyResult(tableName); + + // check physical path + Path checkPath = new Path(exportPath); + checkPath = new Path(checkPath, EximUtil.DATA_PATH_NAME); + FileSystem fs = checkPath.getFileSystem(srcHiveWarehouse.hiveConf); + assertFalse(fs.exists(checkPath)); + checkPath = new Path(exportMetaPath); + checkPath = new Path(checkPath, EximUtil.METADATA_NAME); + assertTrue(fs.exists(checkPath)); + } } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java index 26e308c05d..0f671741e5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java @@ -797,7 +797,7 @@ public void testIncrementalReplWithDropAndCreateTableDifferentPartitionTypeAndIn } @Test - public void shouldNotCreateDirectoryForNonNativeTableInDumpDirectory() throws Throwable { + public void testShouldNotCreateDirectoryForNonNativeTableInDumpDirectory() throws Throwable { String createTableQuery = "CREATE TABLE custom_serdes( serde_id bigint COMMENT 'from deserializer', name string " + "COMMENT 'from deserializer', slib string COMMENT 'from deserializer') " @@ -835,6 +835,33 @@ private void verifyIfCkptSet(WarehouseInstance wh, String dbName, String dumpDir } } + + @Test + public void testShouldDumpMetaDataForNonNativeTableIfSetMetaDataOnly() throws Throwable { + String tableName = testName.getMethodName() + "_table"; + String createTableQuery = + "CREATE TABLE " + tableName + " ( serde_id bigint COMMENT 'from deserializer', name 
string " + + "COMMENT 'from deserializer', slib string COMMENT 'from deserializer') " + + "ROW FORMAT SERDE 'org.apache.hive.storage.jdbc.JdbcSerDe' " + + "STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler' " + + "WITH SERDEPROPERTIES ('serialization.format'='1') " + + "TBLPROPERTIES ( " + + "'hive.sql.database.type'='METASTORE', " + + "'hive.sql.query'='SELECT \"SERDE_ID\", \"NAME\", \"SLIB\" FROM \"SERDES\"')"; + + WarehouseInstance.Tuple bootstrapTuple = primary + .run("use " + primaryDbName) + .run(createTableQuery) + .dump(primaryDbName, null, Collections.singletonList("'hive.repl.dump.metadata.only'='true'")); + + // Bootstrap load in replica + replica.load(replicatedDbName, bootstrapTuple.dumpLocation) + .status(replicatedDbName) + .verifyResult(bootstrapTuple.lastReplicationId) + .run("use " + replicatedDbName) + .run("show tables") + .verifyResult(tableName); + } + private void verifyIfCkptSet(Map props, String dumpDir) { assertTrue(props.containsKey(ReplUtils.REPL_CHECKPOINT_KEY)); assertTrue(props.get(ReplUtils.REPL_CHECKPOINT_KEY).equals(dumpDir)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java index 14572ad8ae..e3566077ce 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java @@ -172,7 +172,8 @@ public static Boolean shouldReplicate(ReplicationSpec replicationSpec, Table tab return false; } - if (tableHandle.isNonNative()) { + // if it's metadata-only, then dump metadata of non-native tables also. + if (tableHandle.isNonNative() && !replicationSpec.isMetadataOnly()) { return false; }