diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
index d31a2022618507e79c59c5aeed1a4d2cf4fb3729..bc0fb2e076f288c1940c3c56b768b94f4c12d09d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
@@ -229,7 +229,7 @@ private SparkJobRef submit(final DriverContext driverContext, final SparkWork sp
     return new RemoteSparkJobRef(hiveConf, jobHandle, sparkJobStatus);
   }
 
-  private void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException {
+  private synchronized void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException {
     // add hive-exec jar
     addJars((new JobConf(this.getClass())).getJar());
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 6a8b42e92689aa943fc662e4a63e3eb6633116ab..7df9f8fea7dc26f86f65b1fbd5e43191bf1e38a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -98,7 +98,6 @@ private final String sessionId;
 
   private volatile HiveSparkClient hiveSparkClient;
   private volatile Path scratchDir;
-  private final Object dirLock = new Object();
 
   /**
    * The timestamp of the last completed Spark job.
@@ -328,11 +327,7 @@ private void cleanScratchDir() throws IOException {
   @Override
   public Path getHDFSSessionDir() throws IOException {
     if (scratchDir == null) {
-      synchronized (dirLock) {
-        if (scratchDir == null) {
-          scratchDir = createScratchDir();
-        }
-      }
+      scratchDir = createScratchDir();
     }
     return scratchDir;
   }
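
For reference, a minimal sketch of how the two touched methods would read after this patch, assembled only from the hunks above. Unchanged surrounding code is elided; the trailing body of refreshLocalResources beyond the shown context lines is an assumption and is marked as such in the comments.

    // SparkSessionImpl: the dirLock field and double-checked locking are removed;
    // the volatile scratchDir field is now assigned directly.
    @Override
    public Path getHDFSSessionDir() throws IOException {
      if (scratchDir == null) {
        scratchDir = createScratchDir();
      }
      return scratchDir;
    }

    // RemoteHiveSparkClient: the method is now synchronized, so concurrent callers
    // serialize on the client instance while local resources are re-registered.
    private synchronized void refreshLocalResources(SparkWork sparkWork, HiveConf conf) throws IOException {
      // add hive-exec jar
      addJars((new JobConf(this.getClass())).getJar());
      // ... remainder of the method is unchanged by this patch ...
    }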