diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 0ac9109..beba592 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -39,6 +39,7 @@
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
@@ -233,7 +234,7 @@ private void commit(FileSystem fs) throws HiveException {
           needToRename = false;
         }
       }
-      if (needToRename && !fs.rename(outPaths[idx], finalPaths[idx])) {
+      if (filesCreated && needToRename && !fs.rename(outPaths[idx], finalPaths[idx])) {
        throw new HiveException("Unable to rename output from: " + outPaths[idx]
            + " to: " + finalPaths[idx]);
      }
@@ -1009,7 +1010,7 @@ public void closeOp(boolean abort) throws HiveException {
       row_count.set(numRows);
       LOG.info(toString() + ": records written - " + numRows);
-      if (!bDynParts && !filesCreated) {
+      if (!bDynParts && !filesCreated && !"tez".equalsIgnoreCase(hconf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname))) {
        createBucketFiles(fsp);
      }
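
For illustration, the net effect of the two guards above is sketched below in a minimal, self-contained class. This is not the actual FileSinkOperator code: the class name SketchSink, the method signatures, and the simplified single-path commit are hypothetical; only the field names (filesCreated, bDynParts), the ConfVars.HIVE_EXECUTION_ENGINE key, and the two conditions come from the diff. commit() now renames the temporary output only when an output file was actually created, and closeOp() falls back to creating empty bucket files only when the execution engine is not Tez.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class SketchSink {
  private boolean filesCreated; // true once output writers have actually been opened
  private boolean bDynParts;    // true when writing dynamic partitions

  /** Mirrors the first hunk: skip the rename entirely if no file was created. */
  void commit(FileSystem fs, Path outPath, Path finalPath, boolean needToRename) throws Exception {
    if (filesCreated && needToRename && !fs.rename(outPath, finalPath)) {
      throw new RuntimeException("Unable to rename output from: " + outPath
          + " to: " + finalPath);
    }
  }

  /** Mirrors the second hunk: only non-Tez engines create empty bucket files on close. */
  void closeOp(Configuration hconf) throws Exception {
    boolean isTez = "tez".equalsIgnoreCase(hconf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname));
    if (!bDynParts && !filesCreated && !isTez) {
      createBucketFiles();
    }
  }

  private void createBucketFiles() {
    // In FileSinkOperator this opens the per-bucket writers; elided in this sketch.
    filesCreated = true;
  }
}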