diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 3ee8490e74..af5746ff48 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -84,7 +84,9 @@ public void testCreateFunctionIncrementalReplication() throws Throwable {
 
     primary.run("CREATE FUNCTION " + primaryDbName
         + ".testFunctionOne as 'hivemall.tools.string.StopwordUDF' "
-        + "using jar 'ivy://io.github.myui:hivemall:0.4.0-2'");
+        + "using jar 'ivy://io.github.myui:hivemall:0.4.0-2'")
+        .run("CREATE FUNCTION " + primaryDbName
+            + ".testFunctionTwo as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMax'");
 
     WarehouseInstance.Tuple incrementalDump =
         primary.dump(primaryDbName, bootStrapDump.lastReplicationId);
@@ -92,14 +94,16 @@ public void testCreateFunctionIncrementalReplication() throws Throwable {
         .run("REPL STATUS " + replicatedDbName)
         .verifyResult(incrementalDump.lastReplicationId)
         .run("SHOW FUNCTIONS LIKE '" + replicatedDbName + "%'")
-        .verifyResult(replicatedDbName + ".testFunctionOne");
+        .verifyResults(new String[] { replicatedDbName + ".testFunctionOne",
+            replicatedDbName + ".testFunctionTwo" });
 
     // Test the idempotent behavior of CREATE FUNCTION
     replica.load(replicatedDbName, incrementalDump.dumpLocation)
         .run("REPL STATUS " + replicatedDbName)
         .verifyResult(incrementalDump.lastReplicationId)
         .run("SHOW FUNCTIONS LIKE '" + replicatedDbName + "%'")
-        .verifyResult(replicatedDbName + ".testFunctionOne");
+        .verifyResults(new String[] { replicatedDbName + ".testFunctionOne",
+            replicatedDbName + ".testFunctionTwo" });
   }
 
   @Test
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
index 6258c9e30c..576eb0699a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
@@ -51,18 +51,20 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi
       throws SemanticException, IOException, MetaException {
     TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
     List<ResourceUri> resourceUris = new ArrayList<>();
-    for (ResourceUri uri : function.getResourceUris()) {
-      Path inputPath = new Path(uri.getUri());
-      if ("hdfs".equals(inputPath.toUri().getScheme())) {
-        FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
-        Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
-        // Initialize ReplChangeManager instance since we will require it to encode file URI.
-        ReplChangeManager.getInstance(hiveConf);
-        String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
-        String newFileUri = ReplChangeManager.encodeFileUri(qualifiedUri.toString(), checkSum, null);
-        resourceUris.add(new ResourceUri(uri.getResourceType(), newFileUri));
-      } else {
-        resourceUris.add(uri);
+    if (function.getResourceUris() != null) {
+      for (ResourceUri uri : function.getResourceUris()) {
+        Path inputPath = new Path(uri.getUri());
+        if ("hdfs".equals(inputPath.toUri().getScheme())) {
+          FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
+          Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
+          // Initialize ReplChangeManager instance since we will require it to encode file URI.
+          ReplChangeManager.getInstance(hiveConf);
+          String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
+          String newFileUri = ReplChangeManager.encodeFileUri(qualifiedUri.toString(), checkSum, null);
+          resourceUris.add(new ResourceUri(uri.getResourceType(), newFileUri));
+        } else {
+          resourceUris.add(uri);
+        }
       }
     }
     Function copyObj = new Function(this.function);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index c5a0519c74..bc891f710a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -129,9 +129,9 @@ private CreateFunctionDesc build() throws SemanticException {
       // and not do them lazily. The reason being the function class used for transformations additionally
       // also creates the corresponding replCopyTasks, which cannot be evaluated lazily. since the query
      // plan needs to be complete before we execute and not modify it while execution in the driver.
-      List<ResourceUri> transformedUris = ImmutableList.copyOf(
-          Lists.transform(metadata.function.getResourceUris(), conversionFunction)
-      );
+      List<ResourceUri> transformedUris = (metadata.function.getResourceUris() == null)
+          ? null
+          : ImmutableList.copyOf(Lists.transform(metadata.function.getResourceUris(), conversionFunction));
       replCopyTasks.addAll(conversionFunction.replCopyTasks);
       String fullQualifiedFunctionName = FunctionUtils.qualifyFunctionName(
           metadata.function.getFunctionName(), destinationDbName
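
Note on the pattern both production files adopt: a function created without a USING JAR/USING FILE clause (like testFunctionTwo above, which maps to a built-in class) has a null resource-URI list, so the dump-side serializer and the load-side handler each need a null guard before iterating or transforming it. Below is a minimal, self-contained sketch of that guard using hypothetical stand-ins (ResourceUri, transformUris, qualify), not Hive's actual classes:

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;

// Hypothetical illustration of the null-guard pattern from this patch.
public class NullSafeResourceUriSketch {

  // Stand-in for the Thrift-generated ResourceUri struct.
  static class ResourceUri {
    final String uri;
    ResourceUri(String uri) { this.uri = uri; }
  }

  // Mirrors what FunctionSerializer.writeTo and CreateFunctionHandler.build
  // now do: propagate null instead of iterating it, so a function with no
  // resources keeps that shape through dump and load.
  static List<ResourceUri> transformUris(List<ResourceUri> uris,
      Function<ResourceUri, ResourceUri> qualify) {
    if (uris == null) {
      return null; // CREATE FUNCTION without USING JAR/FILE carries no resource list
    }
    List<ResourceUri> out = new ArrayList<>(uris.size());
    for (ResourceUri uri : uris) {
      out.add(qualify.apply(uri)); // e.g. rewrite an hdfs URI with a checksum suffix
    }
    return out;
  }

  public static void main(String[] args) {
    List<ResourceUri> jarBacked = new ArrayList<>();
    jarBacked.add(new ResourceUri("hdfs://nn:8020/udfs/hivemall.jar"));

    // Jar-backed function: one URI in, one rewritten URI out.
    System.out.println(
        transformUris(jarBacked, u -> new ResourceUri(u.uri + "#cksum")).get(0).uri);
    // Built-in-backed function: null in, null out, no NPE.
    System.out.println(transformUris(null, u -> u));
  }
}

Returning null rather than an empty list appears deliberate: the load-side ternary hands null straight through, so the replicated function's metadata presumably keeps the same "no resources" shape as the source instead of acquiring an empty list.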