diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java index 7e4f386..06c3b29 100644 --- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.common; +import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.net.URI; @@ -44,6 +45,7 @@ import org.apache.hadoop.hive.shims.Utils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Shell; +import org.apache.hive.common.util.ShutdownHookManager; /** @@ -766,4 +768,33 @@ public static FileStatus getFileStatusOrNull(FileSystem fs, Path path) throws IO return null; } } + + public static void deleteDirectory(File directory) throws IOException { + org.apache.commons.io.FileUtils.deleteDirectory(directory); + } + + public static File createTempFile(String lScratchDir, String prefix, String suffix) throws IOException { + File tmpDir = new File(lScratchDir); + if (!tmpDir.exists() && !tmpDir.mkdirs()) { + // Do another exists to check to handle possible race condition + // Another thread might have created the dir, if that is why + // mkdirs returned false, that is fine + if (!tmpDir.exists()) { + throw new RuntimeException("Unable to create temp directory " + + lScratchDir); + } + } + File tmpFile = File.createTempFile(prefix, suffix, tmpDir); + ShutdownHookManager.deleteOnExit(tmpFile); + return tmpFile; + } + + public static boolean deleteTmpFile(File tempFile) { + if (tempFile != null) { + tempFile.delete(); + ShutdownHookManager.cancelDeleteOnExit(tempFile); + return true; + } + return false; + } } diff --git a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java index fd2f20a..5e53145 100644 --- 
a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java +++ b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java @@ -18,6 +18,7 @@ package org.apache.hive.common.util; +import java.io.File; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -44,15 +45,18 @@ private static final ShutdownHookManager MGR = new ShutdownHookManager(); + private static final DeleteOnExitHook DELETE_ON_EXIT_HOOK = new DeleteOnExitHook(); + private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class); static { + MGR.addShutdownHookInternal(DELETE_ON_EXIT_HOOK, -1); Runtime.getRuntime().addShutdownHook( new Thread() { @Override public void run() { MGR.shutdownInProgress.set(true); - for (Runnable hook: MGR.getShutdownHooksInOrder()) { + for (Runnable hook : getShutdownHooksInOrder()) { try { hook.run(); } catch (Throwable ex) { @@ -115,7 +119,7 @@ private ShutdownHookManager() { return MGR.getShutdownHooksInOrderInternal(); } - List getShutdownHooksInOrderInternal() { + private List getShutdownHooksInOrderInternal() { List list; synchronized (MGR.hooks) { list = new ArrayList(MGR.hooks); @@ -145,6 +149,9 @@ public int compare(HookEntry o1, HookEntry o2) { * @param priority priority of the shutdownHook. 
*/ public static void addShutdownHook(Runnable shutdownHook, int priority) { + if (priority < 0) { + throw new IllegalArgumentException("Priority should be greater than or equal to zero"); + } MGR.addShutdownHookInternal(shutdownHook, priority); } @@ -202,4 +209,29 @@ public static boolean isShutdownInProgress() { private boolean isShutdownInProgressInternal() { return shutdownInProgress.get(); } + + public static void deleteOnExit(File file) { + if (isShutdownInProgress()) { + throw new IllegalStateException("Shutdown in progress, cannot add a deleteOnExit"); + } + DELETE_ON_EXIT_HOOK.deleteTargets.add(file); + } + + public static void cancelDeleteOnExit(File file) { + if (isShutdownInProgress()) { + throw new IllegalStateException("Shutdown in progress, cannot cancel a deleteOnExit"); + } + DELETE_ON_EXIT_HOOK.deleteTargets.remove(file); + } + + private static class DeleteOnExitHook implements Runnable { + private final Set<File> deleteTargets = Collections.synchronizedSet(new HashSet<File>()); + + @Override + public void run() { + for (File deleteTarget : deleteTargets.toArray(new File[0])) { + deleteTarget.delete(); + } + } + } } diff --git a/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java b/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java index fa30f15..db8fb13 100644 --- a/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java +++ b/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java @@ -30,7 +30,7 @@ @Test public void shutdownHookManager() { - Assert.assertEquals(0, ShutdownHookManager.getShutdownHooksInOrder().size()); + Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size()); Runnable hook1 = new Runnable() { @Override public void run() { @@ -44,21 +44,21 @@ public void run() { ShutdownHookManager.addShutdownHook(hook1, 0); Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1)); - Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size()); +
Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size()); Assert.assertEquals(hook1, ShutdownHookManager.getShutdownHooksInOrder().get(0)); ShutdownHookManager.removeShutdownHook(hook1); Assert.assertFalse(ShutdownHookManager.hasShutdownHook(hook1)); ShutdownHookManager.addShutdownHook(hook1, 0); Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1)); - Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size()); + Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size()); Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1)); - Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size()); + Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size()); ShutdownHookManager.addShutdownHook(hook2, 1); Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1)); Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook2)); - Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size()); + Assert.assertEquals(3, ShutdownHookManager.getShutdownHooksInOrder().size()); Assert.assertEquals(hook2, ShutdownHookManager.getShutdownHooksInOrder().get(0)); Assert.assertEquals(hook1, ShutdownHookManager.getShutdownHooksInOrder().get(1)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 41b4bb1..1cdfdda 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -41,7 +41,6 @@ import java.util.concurrent.CancellationException; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -50,6 +49,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import 
org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; @@ -297,6 +297,16 @@ public void setTmpErrOutputFile(File tmpErrOutputFile) { this.tmpErrOutputFile = tmpErrOutputFile; } + public void deleteTmpOutputFile() { + FileUtils.deleteTmpFile(tmpOutputFile); + // do not nullify. will be reused + } + + public void deleteTmpErrOutputFile() { + FileUtils.deleteTmpFile(tmpErrOutputFile); + // do not nullify. will be reused + } + public boolean getIsSilent() { if(conf != null) { return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT); @@ -832,25 +842,10 @@ public void setActiveAuthorizer(Object authorizer) { * @throws IOException */ private static File createTempFile(HiveConf conf) throws IOException { - String lScratchDir = - HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR); - - File tmpDir = new File(lScratchDir); + String lScratchDir = HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR); String sessionID = conf.getVar(HiveConf.ConfVars.HIVESESSIONID); - if (!tmpDir.exists()) { - if (!tmpDir.mkdirs()) { - //Do another exists to check to handle possible race condition - // Another thread might have created the dir, if that is why - // mkdirs returned false, that is fine - if(!tmpDir.exists()){ - throw new RuntimeException("Unable to create log directory " - + lScratchDir); - } - } - } - File tmpFile = File.createTempFile(sessionID, ".pipeout", tmpDir); - tmpFile.deleteOnExit(); - return tmpFile; + + return FileUtils.createTempFile(lScratchDir, sessionID, ".pipeout"); } /** @@ -1537,6 +1532,8 @@ public void close() throws IOException { closeSparkSession(); registry.closeCUDFLoaders(); dropSessionPaths(conf); + deleteTmpOutputFile(); + deleteTmpErrOutputFile(); } public void closeSparkSession() { diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java index 1d1e995..c40c269 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java @@ -200,10 +200,8 @@ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws H private void cleanTmpFile() { resetResultReader(); SessionState sessionState = getParentSession().getSessionState(); - File tmp = sessionState.getTmpOutputFile(); - tmp.delete(); - tmp = sessionState.getTmpErrOutputFile(); - tmp.delete(); + sessionState.deleteTmpOutputFile(); + sessionState.deleteTmpErrOutputFile(); } private void resetResultReader() { diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index 175348b..e2ee388 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -293,13 +293,8 @@ private void cleanup(OperationState state) throws HiveSQLException { driver = null; SessionState ss = SessionState.get(); - if (ss.getTmpOutputFile() != null) { - ss.getTmpOutputFile().delete(); - } - - if (ss.getTmpErrOutputFile() != null) { - ss.getTmpErrOutputFile().delete(); - } + ss.deleteTmpOutputFile(); + ss.deleteTmpErrOutputFile(); } @Override