diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index 7e4f386..d781f08 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.common;
 
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
@@ -44,6 +45,7 @@
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
+import org.apache.hive.common.util.ShutdownHookManager;
 
 /**
@@ -766,4 +768,40 @@ public static FileStatus getFileStatusOrNull(FileSystem fs, Path path) throws IO
       return null;
     }
   }
+
+  public static void deleteDirectory(File directory) throws IOException {
+    org.apache.commons.io.FileUtils.deleteDirectory(directory);
+  }
+
+  /**
+   * create temporary file and register it to delete-on-exit hook.
+   * File.deleteOnExit is not used for possible memory leakage.
+   */
+  public static File createTempFile(String lScratchDir, String prefix, String suffix) throws IOException {
+    File tmpDir = lScratchDir == null ? null : new File(lScratchDir);
+    if (tmpDir != null && !tmpDir.exists() && !tmpDir.mkdirs()) {
+      // Do another exists to check to handle possible race condition
+      // Another thread might have created the dir, if that is why
+      // mkdirs returned false, that is fine
+      if (!tmpDir.exists()) {
+        throw new RuntimeException("Unable to create temp directory "
+            + lScratchDir);
+      }
+    }
+    File tmpFile = File.createTempFile(prefix, suffix, tmpDir);
+    ShutdownHookManager.deleteOnExit(tmpFile);
+    return tmpFile;
+  }
+
+  /**
+   * delete a temporary file and remove it from delete-on-exit hook.
+   */
+  public static boolean deleteTmpFile(File tempFile) {
+    if (tempFile != null) {
+      tempFile.delete();
+      ShutdownHookManager.cancelDeleteOnExit(tempFile);
+      return true;
+    }
+    return false;
+  }
 }
diff --git a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
index fd2f20a..0392eb5 100644
--- a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
+++ b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
@@ -18,6 +18,9 @@
 package org.apache.hive.common.util;
 
+import com.google.common.annotations.VisibleForTesting;
+
+import java.io.File;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -44,15 +47,18 @@
   private static final ShutdownHookManager MGR = new ShutdownHookManager();
 
+  private static final DeleteOnExitHook DELETE_ON_EXIT_HOOK = new DeleteOnExitHook();
+
   private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class);
 
   static {
+    MGR.addShutdownHookInternal(DELETE_ON_EXIT_HOOK, -1);
     Runtime.getRuntime().addShutdownHook(
       new Thread() {
         @Override
         public void run() {
           MGR.shutdownInProgress.set(true);
-          for (Runnable hook: MGR.getShutdownHooksInOrder()) {
+          for (Runnable hook : getShutdownHooksInOrder()) {
            try {
              hook.run();
            } catch (Throwable ex) {
@@ -115,7 +121,7 @@ private ShutdownHookManager() {
     return MGR.getShutdownHooksInOrderInternal();
   }
 
-  List getShutdownHooksInOrderInternal() {
+  private List getShutdownHooksInOrderInternal() {
     List list;
     synchronized (MGR.hooks) {
       list = new ArrayList(MGR.hooks);
@@ -145,6 +151,9 @@ public int compare(HookEntry o1, HookEntry o2) {
    * @param priority priority of the shutdownHook.
    */
  public static void addShutdownHook(Runnable shutdownHook, int priority) {
+    if (priority < 0) {
+      throw new IllegalArgumentException("Priority should be greater than or equal to zero");
+    }
     MGR.addShutdownHookInternal(shutdownHook, priority);
   }
@@ -202,4 +211,43 @@ public static boolean isShutdownInProgress() {
   private boolean isShutdownInProgressInternal() {
     return shutdownInProgress.get();
   }
+
+  /**
+   * register file to delete-on-exit hook
+   *
+   * @see {@link org.apache.hadoop.hive.common.FileUtils#createTempFile}
+   */
+  public static void deleteOnExit(File file) {
+    if (isShutdownInProgress()) {
+      throw new IllegalStateException("Shutdown in progress, cannot add a deleteOnExit");
+    }
+    DELETE_ON_EXIT_HOOK.deleteTargets.add(file);
+  }
+
+  /**
+   * deregister file from delete-on-exit hook
+   */
+  public static void cancelDeleteOnExit(File file) {
+    if (isShutdownInProgress()) {
+      throw new IllegalStateException("Shutdown in progress, cannot cancel a deleteOnExit");
+    }
+    DELETE_ON_EXIT_HOOK.deleteTargets.remove(file);
+  }
+
+  @VisibleForTesting
+  static boolean isRegisteredToDeleteOnExit(File file) {
+    return DELETE_ON_EXIT_HOOK.deleteTargets.contains(file);
+  }
+
+  private static class DeleteOnExitHook implements Runnable {
+    private final Set<File> deleteTargets = Collections.synchronizedSet(new HashSet<File>());
+
+    @Override
+    public void run() {
+      for (File deleteTarget : deleteTargets) {
+        deleteTarget.delete();
+      }
+      deleteTargets.clear();
+    }
+  }
 }
diff --git a/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java b/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
index fa30f15..66f6073 100644
--- a/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
+++ b/common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
@@ -21,6 +21,11 @@
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.common.FileUtils;
+
 /**
  * TestShutdownHookManager.
  *
@@ -30,7 +35,7 @@
   @Test
   public void shutdownHookManager() {
-    Assert.assertEquals(0, ShutdownHookManager.getShutdownHooksInOrder().size());
+    Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
     Runnable hook1 = new Runnable() {
       @Override
       public void run() {
@@ -44,23 +49,30 @@ public void run() {
     ShutdownHookManager.addShutdownHook(hook1, 0);
     Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
-    Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
+    Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size());
     Assert.assertEquals(hook1, ShutdownHookManager.getShutdownHooksInOrder().get(0));
 
     ShutdownHookManager.removeShutdownHook(hook1);
     Assert.assertFalse(ShutdownHookManager.hasShutdownHook(hook1));
 
     ShutdownHookManager.addShutdownHook(hook1, 0);
     Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
-    Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
+    Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size());
     Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
-    Assert.assertEquals(1, ShutdownHookManager.getShutdownHooksInOrder().size());
+    Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size());
 
     ShutdownHookManager.addShutdownHook(hook2, 1);
     Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook1));
     Assert.assertTrue(ShutdownHookManager.hasShutdownHook(hook2));
-    Assert.assertEquals(2, ShutdownHookManager.getShutdownHooksInOrder().size());
+    Assert.assertEquals(3, ShutdownHookManager.getShutdownHooksInOrder().size());
     Assert.assertEquals(hook2, ShutdownHookManager.getShutdownHooksInOrder().get(0));
     Assert.assertEquals(hook1, ShutdownHookManager.getShutdownHooksInOrder().get(1));
+  }
+  @Test
+  public void deleteOnExit() throws IOException {
+    File file = FileUtils.createTempFile(null, "tmp", null);
+    Assert.assertTrue(ShutdownHookManager.isRegisteredToDeleteOnExit(file));
+    FileUtils.deleteTmpFile(file);
+    Assert.assertFalse(ShutdownHookManager.isRegisteredToDeleteOnExit(file));
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 92ac209..34ec4d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -23,8 +23,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLClassLoader;
@@ -43,7 +41,6 @@
 import java.util.concurrent.CancellationException;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -52,6 +49,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -300,6 +298,14 @@ public void setTmpErrOutputFile(File tmpErrOutputFile) {
     this.tmpErrOutputFile = tmpErrOutputFile;
   }
 
+  public void deleteTmpOutputFile() {
+    FileUtils.deleteTmpFile(tmpOutputFile);
+  }
+
+  public void deleteTmpErrOutputFile() {
+    FileUtils.deleteTmpFile(tmpErrOutputFile);
+  }
+
   public boolean getIsSilent() {
     if(conf != null) {
       return conf.getBoolVar(HiveConf.ConfVars.HIVESESSIONSILENT);
@@ -725,9 +731,8 @@ private void dropSessionPaths(Configuration conf) throws IOException {
     if (localSessionPath != null) {
       FileSystem.getLocal(conf).delete(localSessionPath, true);
     }
-    if (this.getTmpOutputFile().exists()) {
-      this.getTmpOutputFile().delete();
-    }
+    deleteTmpOutputFile();
+    deleteTmpErrOutputFile();
   }
 
   /**
@@ -835,25 +840,10 @@ public void setActiveAuthorizer(Object authorizer) {
    * @throws IOException
    */
   private static File createTempFile(HiveConf conf) throws IOException {
-    String lScratchDir =
-        HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR);
-
-    File tmpDir = new File(lScratchDir);
+    String lScratchDir = HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR);
     String sessionID = conf.getVar(HiveConf.ConfVars.HIVESESSIONID);
-    if (!tmpDir.exists()) {
-      if (!tmpDir.mkdirs()) {
-        //Do another exists to check to handle possible race condition
-        // Another thread might have created the dir, if that is why
-        // mkdirs returned false, that is fine
-        if(!tmpDir.exists()){
-          throw new RuntimeException("Unable to create log directory "
-              + lScratchDir);
-        }
-      }
-    }
-    File tmpFile = File.createTempFile(sessionID, ".pipeout", tmpDir);
-    tmpFile.deleteOnExit();
-    return tmpFile;
+
+    return FileUtils.createTempFile(lScratchDir, sessionID, ".pipeout");
   }
 
   /**
diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
index 1d1e995..c40c269 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -200,10 +200,8 @@ public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws H
   private void cleanTmpFile() {
     resetResultReader();
     SessionState sessionState = getParentSession().getSessionState();
-    File tmp = sessionState.getTmpOutputFile();
-    tmp.delete();
-    tmp = sessionState.getTmpErrOutputFile();
-    tmp.delete();
+    sessionState.deleteTmpOutputFile();
+    sessionState.deleteTmpErrOutputFile();
   }
 
   private void resetResultReader() {
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 175348b..e2ee388 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -293,13 +288,8 @@ private void cleanup(OperationState state) throws HiveSQLException {
     driver = null;
 
     SessionState ss = SessionState.get();
-    if (ss.getTmpOutputFile() != null) {
-      ss.getTmpOutputFile().delete();
-    }
-
-    if (ss.getTmpErrOutputFile() != null) {
-      ss.getTmpErrOutputFile().delete();
-    }
+    ss.deleteTmpOutputFile();
+    ss.deleteTmpErrOutputFile();
   }
 
   @Override
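Usage note (reviewer sketch, not part of the patch): the snippet below shows how a caller is expected to use the helpers added above, mirroring the new deleteOnExit test. The class name TmpFileUsageSketch and the choice of prefix/suffix are illustrative assumptions; passing null as the scratch dir falls back to java.io.tmpdir, exactly as the patched FileUtils.createTempFile does.

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.hive.common.FileUtils;

public class TmpFileUsageSketch {
  public static void main(String[] args) throws IOException {
    // Creates the file and registers it with ShutdownHookManager, so it is
    // removed at JVM shutdown even if the caller never cleans it up.
    File tmp = FileUtils.createTempFile(null, "example", ".pipeout");
    try {
      // ... write to / read from tmp ...
    } finally {
      // Deletes the file now and cancels its delete-on-exit registration,
      // so the hook's target set does not grow over a long-lived process.
      FileUtils.deleteTmpFile(tmp);
    }
  }
}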