diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index d7b360cd93..3f53e7f48e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -114,6 +114,42 @@ public void testCreateFunctionIncrementalReplication() throws Throwable {
         replicatedDbName + ".testFunctionTwo" });
   }
 
+  @Test
+  public void testCreateFunctionOnHDFSIncrementalReplication() throws Throwable {
+    Path identityUdfLocalPath = new Path("../../data/files/identity_udf.jar");
+    Path identityUdf1HdfsPath = new Path(primary.functionsRoot, "idFunc1" + File.separator + "identity_udf1.jar");
+    Path identityUdf2HdfsPath = new Path(primary.functionsRoot, "idFunc2" + File.separator + "identity_udf2.jar");
+    setupUDFJarOnHDFS(identityUdfLocalPath, identityUdf1HdfsPath);
+    setupUDFJarOnHDFS(identityUdfLocalPath, identityUdf2HdfsPath);
+
+    primary.run("CREATE FUNCTION " + primaryDbName +
+        ".idFunc1 as 'IdentityStringUDF' " +
+        "using jar '" + identityUdf1HdfsPath.toString() + "'");
+    WarehouseInstance.Tuple bootStrapDump = primary.dump(primaryDbName);
+    replica.load(replicatedDbName, primaryDbName)
+        .run("REPL STATUS " + replicatedDbName)
+        .verifyResult(bootStrapDump.lastReplicationId)
+        .run("SHOW FUNCTIONS LIKE '" + replicatedDbName + "%'")
+        .verifyResults(new String[] { replicatedDbName + ".idFunc1"})
+        .run("SELECT " + replicatedDbName + ".idFunc1('MyName')")
+        .verifyResults(new String[] { "MyName"});
+
+    primary.run("CREATE FUNCTION " + primaryDbName +
+        ".idFunc2 as 'IdentityStringUDF' " +
+        "using jar '" + identityUdf2HdfsPath.toString() + "'");
+
+    WarehouseInstance.Tuple incrementalDump =
+        primary.dump(primaryDbName);
+    replica.load(replicatedDbName, primaryDbName)
+        .run("REPL STATUS " + replicatedDbName)
+        .verifyResult(incrementalDump.lastReplicationId)
+        .run("SHOW FUNCTIONS LIKE '" + replicatedDbName + "%'")
+        .verifyResults(new String[] { replicatedDbName + ".idFunc1",
+            replicatedDbName + ".idFunc2" })
+        .run("SELECT " + replicatedDbName + ".idFunc2('YourName')")
+        .verifyResults(new String[] { "YourName"});
+  }
+
   @Test
   public void testBootstrapReplLoadRetryAfterFailureForFunctions() throws Throwable {
     String funcName1 = "f1";
@@ -1683,4 +1719,9 @@ private void ensureFailedReplOperation(List<String> clause, String conf, boolean
   private String quote(String str) {
     return "'" + str + "'";
   }
+
+  private void setupUDFJarOnHDFS(Path identityUdfLocalPath, Path identityUdfHdfsPath) throws IOException {
+    FileSystem fs = primary.miniDFSCluster.getFileSystem();
+    fs.copyFromLocalFile(identityUdfLocalPath, identityUdfHdfsPath);
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
index c11f58219c..5ffc110c42 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
@@ -91,7 +91,7 @@ protected int copyOnePath(Path fromPath, Path toPath) {
         Utilities.FILE_OP_LOGGER.debug("Copying file {} to {}", oneSrcPathStr, toPath);
         if (!FileUtils.copy(srcFs, oneSrc.getPath(), dstFs, toPath,
             false, // delete source
-            true, // overwrite destination
+            work.isOverwrite(), // overwrite destination
            conf)) {
          console.printError("Failed to copy: '" +
oneSrcPathStr + "to: '" + toPath.toString() + "'"); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java index b15b326b1c..934c4d97ab 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpTask.java @@ -244,6 +244,7 @@ private void initiateDataCopyTasks() throws SemanticException { } childTasks.addAll(work.externalTableCopyTasks(taskTracker, conf)); childTasks.addAll(work.managedTableCopyTasks(taskTracker, conf)); + childTasks.addAll(work.functionsBinariesCopyTasks(taskTracker, conf)); if (childTasks.isEmpty()) { //All table data copy work finished. finishRemainingTasks(); @@ -790,6 +791,7 @@ Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot, Hive hiveDb) //We can't reuse the previous write id as it might be invalid due to compaction metadataPath.getFileSystem(conf).delete(metadataPath, true); } + List functionsBinaryCopyPaths = Collections.emptyList(); for (String dbName : Utils.matchesDb(hiveDb, work.dbNameOrPattern)) { LOG.debug("Dumping db: " + dbName); // TODO : Currently we don't support separate table list for each database. @@ -813,8 +815,7 @@ Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot, Hive hiveDb) work.getMetricCollector().reportStageStart(getName(), metricMap); Path dbRoot = dumpDbMetadata(dbName, metadataPath, bootDumpBeginReplId, hiveDb); Path dbDataRoot = new Path(new Path(dumpRoot, EximUtil.DATA_PATH_NAME), dbName); - dumpFunctionMetadata(dbName, dbRoot, hiveDb); - + functionsBinaryCopyPaths = dumpFunctionMetadata(dbName, dbRoot, dbDataRoot, hiveDb); String uniqueKey = Utils.setDbBootstrapDumpState(hiveDb, dbName); Exception caught = null; try (Writer writer = new Writer(dbRoot, conf)) { @@ -873,7 +874,7 @@ Long bootStrapDump(Path dumpRoot, DumpMetaData dmd, Path cmRoot, Hive hiveDb) long executorId = conf.getLong(Constants.SCHEDULED_QUERY_EXECUTIONID, 0L); dmd.setDump(DumpType.BOOTSTRAP, bootDumpBeginReplId, bootDumpEndReplId, cmRoot, executorId); dmd.write(true); - + work.setFunctionCopyPathIterator(functionsBinaryCopyPaths.iterator()); work.setDirCopyIterator(extTableCopyWorks.iterator()); work.setManagedTableCopyPathIterator(managedTableCopyPaths.iterator()); return bootDumpBeginReplId; @@ -1059,24 +1060,30 @@ private Path getLatestDumpPath(Path dumpRoot) throws IOException { return null; } - void dumpFunctionMetadata(String dbName, Path dbMetadataRoot, Hive hiveDb) throws Exception { - Path functionsRoot = new Path(dbMetadataRoot, ReplUtils.FUNCTIONS_ROOT_DIR_NAME); + List dumpFunctionMetadata(String dbName, Path dbMetadataRoot, Path dbDataRoot, + Hive hiveDb) throws Exception { + List functionsBinaryCopyPaths = new ArrayList<>(); + Path functionsMetaRoot = new Path(dbMetadataRoot, ReplUtils.FUNCTIONS_ROOT_DIR_NAME); + Path functionsDataRoot = new Path(dbDataRoot, ReplUtils.FUNCTIONS_ROOT_DIR_NAME); List functionNames = hiveDb.getFunctions(dbName, "*"); for (String functionName : functionNames) { HiveWrapper.Tuple tuple = functionTuple(functionName, dbName, hiveDb); if (tuple == null) { continue; } - Path functionRoot = new Path(functionsRoot, functionName); - Path functionMetadataFile = new Path(functionRoot, FUNCTION_METADATA_FILE_NAME); + Path functionMetaRoot = new Path(functionsMetaRoot, functionName); + Path functionMetadataFile = new Path(functionMetaRoot, FUNCTION_METADATA_FILE_NAME); + Path functionDataRoot = new Path(functionsDataRoot, 
functionName); try (JsonWriter jsonWriter = new JsonWriter(functionMetadataFile.getFileSystem(conf), functionMetadataFile)) { - FunctionSerializer serializer = new FunctionSerializer(tuple.object, conf); + FunctionSerializer serializer = new FunctionSerializer(tuple.object, functionDataRoot, conf); serializer.writeTo(jsonWriter, tuple.replicationSpec); + functionsBinaryCopyPaths.addAll(serializer.getFunctionBinaryCopyPaths()); } work.getMetricCollector().reportStageProgress(getName(), ReplUtils.MetricName.FUNCTIONS.name(), 1); replLogger.functionLog(functionName); } + return functionsBinaryCopyPaths; } void dumpConstraintMetadata(String dbName, String tblName, Path dbRoot, Hive hiveDb) throws Exception { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java index bccaf9417b..95a5844742 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/ReplDumpWork.java @@ -37,6 +37,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Map; @Explain(displayName = "Replication Dump Operator", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, @@ -55,6 +56,7 @@ private Integer maxEventLimit; private transient Iterator dirCopyIterator; private transient Iterator managedTableCopyPathIterator; + private transient Iterator functionCopyPathIterator; private Path currentDumpPath; private List resultValues; private boolean shouldOverwrite; @@ -144,6 +146,13 @@ public void setManagedTableCopyPathIterator(Iterator functionCopyPathIterator) { + if (this.functionCopyPathIterator != null) { + throw new IllegalStateException("Function copy path iterator has already been initialized"); + } + this.functionCopyPathIterator = functionCopyPathIterator; + } + public boolean tableDataCopyIteratorsInitialized() { return dirCopyIterator != null || managedTableCopyPathIterator != null; } @@ -196,6 +205,22 @@ public void setResultValues(List resultValues) { return tasks; } + public List> functionsBinariesCopyTasks(TaskTracker tracker, HiveConf conf) { + List> tasks = new ArrayList<>(); + if (functionCopyPathIterator != null) { + while (functionCopyPathIterator.hasNext() && tracker.canAddMoreTasks()) { + EximUtil.FunctionBinaryCopyPath binaryCopyPath = functionCopyPathIterator.next(); + Task copyTask = ReplCopyTask.getLoadCopyTask( + binaryCopyPath.getReplicationSpec(), binaryCopyPath.getSrcPath(), binaryCopyPath.getTargetPath(), conf + ); + tasks.add(copyTask); + tracker.addTask(copyTask); + LOG.debug("added task for {}", binaryCopyPath); + } + } + return tasks; + } + public void setShouldOverwrite(boolean shouldOverwrite) { this.shouldOverwrite = shouldOverwrite; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java index fb6a38cd43..56eaf5e5be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java @@ -217,6 +217,52 @@ public static void setNullSrcPath(HiveConf conf, boolean aNullSrcPath) { } } + + /** + * Wrapper class for mapping source and target path for copying function binaries. 
+   */
+  public static class FunctionBinaryCopyPath {
+    private ReplicationSpec replicationSpec;
+    private Path srcPath;
+    private Path tgtPath;
+
+    public FunctionBinaryCopyPath(ReplicationSpec replicationSpec, Path srcPath, Path tgtPath) {
+      this.replicationSpec = replicationSpec;
+      if (srcPath == null) {
+        throw new IllegalArgumentException("Source path cannot be null.");
+      }
+      this.srcPath = srcPath;
+      if (tgtPath == null) {
+        throw new IllegalArgumentException("Target path cannot be null.");
+      }
+      this.tgtPath = tgtPath;
+    }
+
+    public Path getSrcPath() {
+      return srcPath;
+    }
+
+    public Path getTargetPath() {
+      return tgtPath;
+    }
+
+    @Override
+    public String toString() {
+      return "FunctionBinaryCopyPath{" +
+          "fullyQualifiedSourcePath=" + srcPath +
+          ", fullyQualifiedTargetPath=" + tgtPath +
+          '}';
+    }
+
+    public ReplicationSpec getReplicationSpec() {
+      return replicationSpec;
+    }
+
+    public void setReplicationSpec(ReplicationSpec replicationSpec) {
+      this.replicationSpec = replicationSpec;
+    }
+  }
+
   private EximUtil() {
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
index c9e1041fc1..addd5793b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
@@ -20,13 +20,25 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ReplChangeManager;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.messaging.CreateFunctionMessage;
+import org.apache.hadoop.hive.ql.metadata.HiveFatalException;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
+import org.apache.hadoop.hive.ql.parse.repl.CopyUtils;
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.dump.io.FunctionSerializer;
 import org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter;
+import org.apache.hadoop.hive.ql.parse.EximUtil.FunctionBinaryCopyPath;
+
+import javax.security.auth.login.LoginException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
 
 class CreateFunctionHandler extends AbstractEventHandler<CreateFunctionMessage> {
   CreateFunctionHandler(NotificationEvent event) {
     super(event);
   }
@@ -41,13 +53,36 @@ CreateFunctionMessage eventMessage(String stringRepresentation) {
   public void handle(Context withinContext) throws Exception {
     LOG.info("Processing#{} CREATE_FUNCTION message : {}", fromEventId(), eventMessageAsJSON);
     Path metadataPath = new Path(withinContext.eventRoot, EximUtil.METADATA_NAME);
+    Path dataPath = new Path(withinContext.eventRoot, EximUtil.DATA_PATH_NAME);
     FileSystem fileSystem = metadataPath.getFileSystem(withinContext.hiveConf);
-
+    List<FunctionBinaryCopyPath> functionBinaryCopyPaths = new ArrayList<>();
     try (JsonWriter jsonWriter = new JsonWriter(fileSystem, metadataPath)) {
-      new FunctionSerializer(eventMessage.getFunctionObj(), withinContext.hiveConf)
-          .writeTo(jsonWriter, withinContext.replicationSpec);
+      FunctionSerializer serializer = new FunctionSerializer(eventMessage.getFunctionObj(),
+          dataPath, withinContext.hiveConf);
+      serializer.writeTo(jsonWriter, withinContext.replicationSpec);
+      functionBinaryCopyPaths.addAll(serializer.getFunctionBinaryCopyPaths());
     }
     withinContext.createDmd(this).write();
+    copyFunctionBinaries(functionBinaryCopyPaths, withinContext.hiveConf);
+  }
+
+  private void copyFunctionBinaries(List<FunctionBinaryCopyPath> functionBinaryCopyPaths, HiveConf hiveConf)
+      throws MetaException, IOException, LoginException, HiveFatalException {
+    if (!functionBinaryCopyPaths.isEmpty()) {
+      String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
+      List<ReplChangeManager.FileInfo> filePaths = new ArrayList<>();
+      for (FunctionBinaryCopyPath funcBinCopyPath : functionBinaryCopyPaths) {
+        String[] decodedURISplits = ReplChangeManager.decodeFileUri(funcBinCopyPath.getSrcPath().toString());
+        ReplChangeManager.FileInfo fileInfo = ReplChangeManager.getFileInfo(new Path(decodedURISplits[0]),
+            decodedURISplits[1], decodedURISplits[2], decodedURISplits[3], hiveConf);
+        filePaths.add(fileInfo);
+        Path destRoot = funcBinCopyPath.getTargetPath().getParent();
+        FileSystem dstFs = destRoot.getFileSystem(hiveConf);
+        CopyUtils copyUtils = new CopyUtils(distCpDoAsUser, hiveConf, dstFs);
+        copyUtils.copyAndVerify(destRoot, filePaths, funcBinCopyPath.getSrcPath(), false);
+        copyUtils.renameFileCopiedFromCmPath(destRoot, dstFs, filePaths);
+      }
+    }
   }
 
   @Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
index 733bab522f..797327eaa0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FunctionSerializer.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.EximUtil;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.PathBuilder;
@@ -40,10 +41,13 @@
   public static final String FIELD_NAME = "function";
   private Function function;
   private HiveConf hiveConf;
+  private Path functionDataRoot;
+  private List<EximUtil.FunctionBinaryCopyPath> functionBinaryCopyPaths = new ArrayList<>();
 
-  public FunctionSerializer(Function function, HiveConf hiveConf) {
+  public FunctionSerializer(Function function, Path functionDataRoot, HiveConf hiveConf) {
     this.hiveConf = hiveConf;
     this.function = function;
+    this.functionDataRoot = functionDataRoot;
   }
 
   @Override
@@ -58,9 +62,12 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi
           FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
           Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
           String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
-          String newFileUri = ReplChangeManager.getInstance(hiveConf)
+          String encodedSrcUri = ReplChangeManager.getInstance(hiveConf)
               .encodeFileUri(qualifiedUri.toString(), checkSum, null);
-          resourceUris.add(new ResourceUri(uri.getResourceType(), newFileUri));
+          Path newBinaryPath = new Path(functionDataRoot, qualifiedUri.getName());
+          resourceUris.add(new ResourceUri(uri.getResourceType(), newBinaryPath.toString()));
+          functionBinaryCopyPaths.add(new EximUtil.FunctionBinaryCopyPath(additionalPropertiesProvider,
+              new Path(encodedSrcUri), newBinaryPath));
         } else {
           resourceUris.add(uri);
         }
@@ -84,4 +91,8 @@ public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvi
       throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
     }
   }
+
+  public List<EximUtil.FunctionBinaryCopyPath> getFunctionBinaryCopyPaths() {
+    return functionBinaryCopyPaths;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index 948d201ddc..ae550a2723 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -30,7 +30,6 @@
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.function.create.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
-import org.apache.hadoop.hive.ql.exec.ReplCopyTask;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.parse.EximUtil;
@@ -38,6 +37,7 @@
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.PathBuilder;
 import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
+import org.apache.hadoop.hive.ql.plan.CopyWork;
 import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
 
 import java.io.IOException;
@@ -193,10 +193,8 @@ ResourceUri destinationResourceUri(ResourceUri resourceUri)
           new Path(functionsRootDir).getFileSystem(context.hiveConf)
       );
 
-      Task<?> copyTask = ReplCopyTask.getLoadCopyTask(
-          metadata.getReplicationSpec(), new Path(sourceUri), qualifiedDestinationPath,
-          context.hiveConf
-      );
+      Task<?> copyTask = TaskFactory.get(
+          new CopyWork(new Path(sourceUri), qualifiedDestinationPath, true, true), context.hiveConf);
       replCopyTasks.add(copyTask);
       ResourceUri destinationUri =
           new ResourceUri(resourceUri.getResourceType(), qualifiedDestinationPath.toString());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
index 018983f6dc..f69776ad7b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
@@ -33,6 +33,7 @@
   private Path[] fromPath;
   private Path[] toPath;
   private boolean errorOnSrcEmpty;
+  private boolean overwrite = true;
 
   public CopyWork() {
   }
@@ -42,6 +43,12 @@ public CopyWork(final Path fromPath, final Path toPath, boolean errorOnSrcEmpty)
     this.setErrorOnSrcEmpty(errorOnSrcEmpty);
   }
 
+  public CopyWork(final Path fromPath, final Path toPath, boolean errorOnSrcEmpty, boolean overwrite) {
+    this(new Path[] { fromPath }, new Path[] { toPath });
+    this.setErrorOnSrcEmpty(errorOnSrcEmpty);
+    this.setOverwrite(overwrite);
+  }
+
   public CopyWork(final Path[] fromPath, final Path[] toPath) {
     if (fromPath.length != toPath.length) {
       throw new RuntimeException(
@@ -87,4 +94,12 @@ public void setErrorOnSrcEmpty(boolean errorOnSrcEmpty) {
   public boolean isErrorOnSrcEmpty() {
     return errorOnSrcEmpty;
   }
+
+  public boolean isOverwrite() {
+    return overwrite;
+  }
+
+  public void setOverwrite(boolean overwrite) {
+    this.overwrite = overwrite;
+  }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
index 8454b9c420..de52b7d591 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/repl/TestReplDumpTask.java
@@ -87,7 +87,9 @@ String getValidTxnListForReplDump(Hive hiveDb, long waitUntilTime) {
     }
 
     @Override
-    void dumpFunctionMetadata(String dbName, Path dbMetadataRoot, Hive hiveDb) {
+    List<EximUtil.FunctionBinaryCopyPath> dumpFunctionMetadata(String dbName, Path dbMetadataRoot, Path dbDataRoot,
+        Hive hiveDb) {
+      return Collections.emptyList();
     }
 
     @Override