Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java	(revision 26877)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java	(working copy)
@@ -1011,10 +1011,6 @@
       return null;
     }
 
-    try {
-      fs.close();
-    } catch (IOException e) {
-    }
     String file = path.makeQualified(fs).toString();
     // For compatibility with hadoop 0.17, change file:/a/b/c to file:///a/b/c
     if (StringUtils.startsWith(file, "file:/") && !StringUtils.startsWith(file, "file:///")) {
Index: ql/src/java/org/apache/hadoop/hive/ql/Context.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Context.java	(revision 26877)
+++ ql/src/java/org/apache/hadoop/hive/ql/Context.java	(working copy)
@@ -51,6 +51,7 @@
  * each query should call clear() at end of use to remove temporary folders
  */
 public class Context {
+  private boolean isHDFSCleanup;
   private Path resFile;
   private Path resDir;
   private FileSystem resFs;
@@ -166,12 +167,16 @@
             throw new RuntimeException("Cannot make directory: " + dirPath.toString());
           }
+          if (isHDFSCleanup) {
+            fs.deleteOnExit(dirPath);
+          }
         } catch (IOException e) {
           throw new RuntimeException (e);
         }
       }
 
       dir = dirPath.toString();
       fsScratchDirs.put(fileSystem, dir);
+
     }
     return dir;
   }
@@ -568,6 +573,20 @@
     paths.addAll(toAdd);
   }
 
+  /**
+   * @return the isHDFSCleanup
+   */
+  public boolean isHDFSCleanup() {
+    return isHDFSCleanup;
+  }
+
+  /**
+   * @param isHDFSCleanup the isHDFSCleanup to set
+   */
+  public void setHDFSCleanup(boolean isHDFSCleanup) {
+    this.isHDFSCleanup = isHDFSCleanup;
+  }
+
   public boolean isNeedLockMgr() {
     return needLockMgr;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 26877)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -87,7 +87,6 @@
 import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
@@ -410,6 +409,7 @@
       ctx = new Context(conf);
       ctx.setTryCount(getTryCount());
       ctx.setCmd(command);
+      ctx.setHDFSCleanup(true);
 
       ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command, ctx);
@@ -1091,6 +1091,7 @@
 
       Map<TaskResult, TaskRunner> running = new HashMap<TaskResult, TaskRunner>();
       DriverContext driverCxt = new DriverContext(runnable, ctx);
+      ctx.setHDFSCleanup(true);
 
       SessionState.get().setLastMapRedStatsList(new ArrayList<MapRedStats>());
       SessionState.get().setStackTraces(new HashMap<String, List<List<String>>>());
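
Note (not part of the patch): the change stops closing the shared, cached FileSystem instance in Utilities.realFile() and instead relies on FileSystem.deleteOnExit() to remove scratch directories, gated by the new Context.isHDFSCleanup flag set from Driver. A minimal standalone sketch of that pattern follows; the class name and scratch path are hypothetical and only illustrate the Hadoop API the patch uses.

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ScratchDirCleanupSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    Path scratchDir = new Path("/tmp/hive-example-scratch"); // hypothetical location

    // Obtain the FileSystem for the path. FileSystem instances are shared via
    // FileSystem.CACHE, so closing one here (as the removed fs.close() did)
    // would also close it for every other user of the same cached instance.
    FileSystem fs = scratchDir.getFileSystem(conf);

    if (!fs.mkdirs(scratchDir)) {
      throw new IOException("Cannot make directory: " + scratchDir);
    }

    // Register the directory for deferred deletion instead: Hadoop deletes all
    // paths registered via deleteOnExit() when the FileSystem is closed (for
    // example by the JVM shutdown hook). This is the cleanup Context.getScratchDir()
    // performs when isHDFSCleanup is set.
    fs.deleteOnExit(scratchDir);
  }
}
```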