diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index 22094c0563..40c34bfa14 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.repl.dump.io.DBSerializer;
 import org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter;
 import org.apache.hadoop.hive.ql.parse.repl.dump.io.ReplicationSpecSerializer;
@@ -385,4 +386,37 @@ public boolean accept(Path p) {
     };
   }
 
+  /**
+   * Decides whether a table should be included in an export / replication dump.
+   *
+   * @param replicationSpec the replication spec in effect; {@code null} is treated as a default spec
+   * @param tableHandle the candidate table; {@code null} means there is nothing to export
+   * @return true if the table should be exported, false if it should be silently skipped
+   * @throws SemanticException if a non-native table is exported outside of replication scope
+   */
+  public static boolean shouldExportTable(ReplicationSpec replicationSpec, Table tableHandle) throws SemanticException {
+    if (replicationSpec == null) {
+      replicationSpec = new ReplicationSpec();
+    }
+
+    if (replicationSpec.isNoop()) {
+      return false;
+    }
+
+    if (tableHandle == null) {
+      return false;
+    }
+
+    // Within replication scope, temporary and non-native tables are silently skipped.
+    if (replicationSpec.isInReplicationScope()) {
+      return !(tableHandle.isTemporary() || tableHandle.isNonNative());
+    }
+
+    // A plain EXPORT of a non-native (storage-handler backed) table is an error.
+    if (tableHandle.isNonNative()) {
+      throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
+    }
+
+    return true;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 606a414906..1cad6c39f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -197,6 +197,11 @@ public static boolean prepareImport(
       throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
     }
 
+    if (rv.getTable() == null) {
+      // nothing to do here, silently return.
+      return false;
+    }
+
     ReplicationSpec replicationSpec = rv.getReplicationSpec();
     if (replicationSpec.isNoop()){
       // nothing to do here, silently return.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
index 5eae35a7f0..88c33283fa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/TableExport.java
@@ -158,12 +158,7 @@ private void writeData(PartitionIterable partitions) throws SemanticException {
   }
 
   private boolean shouldExport() throws SemanticException {
-    if (replicationSpec.isInReplicationScope()) {
-      return !(tableSpec.tableHandle.isTemporary() || tableSpec.tableHandle.isNonNative());
-    } else if (tableSpec.tableHandle.isNonNative()) {
-      throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
-    }
-    return true;
+    return EximUtil.shouldExportTable(replicationSpec, tableSpec.tableHandle);
   }
 
   /**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
index 8737d502ee..ef6f340013 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateTableHandler.java
@@ -23,13 +23,12 @@
 import org.apache.hadoop.hive.metastore.messaging.CreateTableMessage;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
+import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 
 import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 
-import org.apache.hadoop.hive.ql.parse.repl.DumpType;
-
 class CreateTableHandler extends AbstractEventHandler {
 
   CreateTableHandler(NotificationEvent event) {
@@ -48,6 +47,11 @@ public void handle(Context withinContext) throws Exception {
} Table qlMdTable = new Table(tobj); + + if (!EximUtil.shouldExportTable(withinContext.replicationSpec, qlMdTable)) { + return; + } + if (qlMdTable.isView()) { withinContext.replicationSpec.setIsMetadataOnly(true); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java index c443e5308a..762a95a690 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/TableSerializer.java @@ -21,6 +21,7 @@ import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.parse.EximUtil; import org.apache.hadoop.hive.ql.parse.ReplicationSpec; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.thrift.TException; @@ -44,7 +45,7 @@ public TableSerializer(org.apache.hadoop.hive.ql.metadata.Table tableHandle, @Override public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException { - if (cannotReplicateTable(additionalPropertiesProvider)) { + if (!EximUtil.shouldExportTable(additionalPropertiesProvider, tableHandle)) { return; } @@ -61,10 +62,6 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi } } - private boolean cannotReplicateTable(ReplicationSpec additionalPropertiesProvider) { - return tableHandle == null || additionalPropertiesProvider.isNoop(); - } - private Table addPropertiesToTable(Table table, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException { if (additionalPropertiesProvider.isInReplicationScope()) {