diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 9a91e3f..f68a219 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -424,7 +424,7 @@ private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName,
         loadPath, Utilities.getTableDesc(table), new TreeMap<>(), lft, writeId);
     loadTableWork.setStmtId(stmtId);
     MoveWork mv = new MoveWork(x.getInputs(), x.getOutputs(), loadTableWork, null, false);
-    Task loadTableTask = TaskFactory.get(mv);
+    Task loadTableTask = TaskFactory.get(mv, x.getConf());
     copyTask.addDependentTask(loadTableTask);
     x.getTasks().add(copyTask);
     return loadTableTask;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
index 6c6ee02..00ce977 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
@@ -80,7 +80,7 @@
       }
 
       Task alterDbTask = TaskFactory.get(
-              new DDLWork(readEntitySet, writeEntitySet, alterDbDesc));
+              new DDLWork(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf);
       context.log.debug("Added alter database task : {}:{}",
               alterDbTask.getId(), actualDbName);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index 77c2dd2..24732d5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -65,7 +65,7 @@ public String getFunctionName() {
       context.log.debug("Loading function desc : {}", descToLoad.toString());
       Task createTask = TaskFactory.get(
-          new FunctionWork(descToLoad));
+          new FunctionWork(descToLoad), context.hiveConf);
       context.log.debug("Added create function task : {}:{},{}", createTask.getId(),
           descToLoad.getFunctionName(), descToLoad.getClassName());
       // This null check is specifically done as the same class is used to handle both incremental and
@@ -183,15 +183,13 @@ ResourceUri destinationResourceUri(ResourceUri resourceUri)
       String sourceUri = resourceUri.getUri();
       String[] split = sourceUri.split(Path.SEPARATOR);
       PathBuilder pathBuilder = new PathBuilder(functionsRootDir);
-      Path qualifiedDestinationPath = PathBuilder.fullyQualifiedHDFSUri(
+      Path qualifiedDestinationPath = pathBuilder
           .addDescendant(destinationDbName.toLowerCase())
           .addDescendant(metadata.function.getFunctionName().toLowerCase())
           .addDescendant(String.valueOf(System.nanoTime()))
           .addDescendant(ReplChangeManager.getFileWithChksumFromURI(split[split.length - 1])[0])
-          .build(),
-          FileSystem.get(context.hiveConf)
-      );
+          .build();
 
       Task copyTask = ReplCopyTask.getLoadCopyTask(
           metadata.getReplicationSpec(), new Path(sourceUri), qualifiedDestinationPath,