diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
index 537c5e9..9191d22 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
@@ -91,14 +91,14 @@ Database readDbMetadata() throws SemanticException {
     // db.
     // TODO: we might revisit this in create-drop-recreate cases, needs some thinking on.
     DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), createDbDesc);
-    return TaskFactory.get(work, context.hiveConf);
+    return TaskFactory.get(work, context.hiveConf, true);
   }
 
   private static Task alterDbTask(Database dbObj, HiveConf hiveConf) {
     AlterDatabaseDesc alterDbDesc =
         new AlterDatabaseDesc(dbObj.getName(), dbObj.getParameters(), null);
     DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
-    return TaskFactory.get(work, hiveConf);
+    return TaskFactory.get(work, hiveConf, true);
   }
 
   private Task setOwnerInfoTask(Database dbObj) {
@@ -106,7 +106,7 @@ Database readDbMetadata() throws SemanticException {
         new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()), null);
     DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
-    return TaskFactory.get(work, context.hiveConf);
+    return TaskFactory.get(work, context.hiveConf, true);
   }
 
   private boolean existEmptyDb(String dbName) throws InvalidOperationException, HiveException {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
index bad7962..0a82225 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadPartitions.java
@@ -113,7 +113,7 @@ private String location() throws MetaException, HiveException {
   private void createTableReplLogTask() throws SemanticException {
     ReplStateLogWork replLogWork = new ReplStateLogWork(replLogger,
         tableDesc.getTableName(), tableDesc.tableType());
-    Task replLogTask = TaskFactory.get(replLogWork, context.hiveConf);
+    Task replLogTask = TaskFactory.get(replLogWork, context.hiveConf, true);
 
     if (tracker.tasks().isEmpty()) {
       tracker.addTask(replLogTask);
@@ -224,7 +224,8 @@ private void addPartition(boolean hasMorePartitions, AddPartitionDesc addPartiti
     Task addPartTask = TaskFactory.get(
         new DDLWork(new HashSet<>(), new HashSet<>(), addPartitionDesc),
-        context.hiveConf
+        context.hiveConf,
+        true
     );
 
     Task movePartitionTask = movePartitionTask(table, partSpec, tmpPath);
@@ -246,7 +247,7 @@ private void addPartition(boolean hasMorePartitions, AddPartitionDesc addPartiti
     );
     loadTableWork.setInheritTableSpecs(false);
     MoveWork work = new MoveWork(new HashSet<>(), new HashSet<>(), loadTableWork, null, false);
-    return TaskFactory.get(work, context.hiveConf);
+    return TaskFactory.get(work, context.hiveConf, true);
   }
 
   private Path locationOnReplicaWarehouse(Table table, AddPartitionDesc.OnePartitionDesc partSpec)
@@ -274,7 +275,8 @@ private Path locationOnReplicaWarehouse(Table table, AddPartitionDesc.OnePartiti
     desc.getPartition(0).setLocation(ptn.getLocation()); // use existing location
     return TaskFactory.get(
         new DDLWork(new HashSet<>(), new HashSet<>(), desc),
-        context.hiveConf
+        context.hiveConf,
+        true
     );
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
index 3b8cb68..e0721f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/table/LoadTable.java
@@ -234,7 +234,7 @@ private String location(ImportTableDesc tblDesc, Database parentDb)
     );
     MoveWork moveWork =
         new MoveWork(new HashSet<>(), new HashSet<>(), loadTableWork, null, false);
-    Task loadTableTask = TaskFactory.get(moveWork, context.hiveConf);
+    Task loadTableTask = TaskFactory.get(moveWork, context.hiveConf, true);
     copyTask.addDependentTask(loadTableTask);
     return copyTask;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
index bdfb632..aef83b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
@@ -317,10 +317,10 @@ public String getDatabaseName() {
 
   public Task getCreateTableTask(HashSet inputs, HashSet outputs, HiveConf conf) {
     switch (getDescType()) {
-    case TABLE:
-      return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf);
-    case VIEW:
-      return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf);
+      case TABLE:
+        return TaskFactory.get(new DDLWork(inputs, outputs, createTblDesc), conf, true);
+      case VIEW:
+        return TaskFactory.get(new DDLWork(inputs, outputs, createViewDesc), conf, true);
     }
     return null;
   }
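Note on the pattern: every hunk in this patch makes the same mechanical change, switching two-argument `TaskFactory.get(work, conf)` calls on the REPL bootstrap-load path to the three-argument overload with a trailing `true` (the hunks themselves do not show what the boolean controls). As a sketch only, a small wrapper like the one below could keep the flag in one place if this pattern keeps spreading across the repl code; the `ReplTasks` class and `replTask` method are hypothetical names, not part of this patch.

```java
import java.io.Serializable;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;

// Hypothetical helper, not part of this patch: centralizes the trailing
// boolean that every call site in the diff now passes to TaskFactory.get.
final class ReplTasks {
  private ReplTasks() {
  }

  static <T extends Serializable> Task replTask(T work, HiveConf conf) {
    // Same three-argument call the patch introduces at each site; if the
    // flag ever needs to change, it changes in one place.
    return TaskFactory.get(work, conf, true);
  }
}
```

A call site such as `alterDbTask` would then shrink to `return ReplTasks.replTask(work, hiveConf);`, which also makes it harder for a future call site to forget the flag.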