diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 51c6715..8224ef9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -56,13 +56,18 @@ public SparkSessionImpl() {
 
   @Override
   public void open(HiveConf conf) throws HiveException {
+    LOG.info("Trying to open Spark session {}", sessionId);
     this.conf = conf;
     isOpen = true;
     try {
       hiveSparkClient = HiveSparkClientFactory.createHiveSparkClient(conf);
     } catch (Throwable e) {
-      throw new HiveException("Failed to create spark client.", e);
+      // It's possible that user session is closed while creating Spark client.
+      String msg = isOpen ? "Failed to create Spark client for Spark session " + sessionId :
+          "Spark Session " + sessionId + " is closed before Spark client is created";
+      throw new HiveException(msg, e);
     }
+    LOG.info("Spark session {} is successfully opened", sessionId);
   }
 
   @Override
@@ -121,10 +126,12 @@ public String getSessionId() {
 
   @Override
   public void close() {
+    LOG.info("Trying to close Spark session {}", sessionId);
     isOpen = false;
     if (hiveSparkClient != null) {
       try {
         hiveSparkClient.close();
+        LOG.info("Spark session {} is successfully closed", sessionId);
         cleanScratchDir();
       } catch (IOException e) {
         LOG.error("Failed to close spark session (" + sessionId + ").", e);
diff --git spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
index 67f2158..a634b0f 100644
--- spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
+++ spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
@@ -109,19 +109,25 @@
       // The RPC server will take care of timeouts here.
       this.driverRpc = rpcServer.registerClient(clientId, secret, protocol).get();
     } catch (Throwable e) {
+      String errorMsg = null;
       if (e.getCause() instanceof TimeoutException) {
-        LOG.error("Timed out waiting for client to connect.\nPossible reasons include network " +
+        errorMsg = "Timed out waiting for client to connect.\nPossible reasons include network " +
            "issues, errors in remote driver or the cluster has no available resources, etc." +
-           "\nPlease check YARN or Spark driver's logs for further information.", e);
+           "\nPlease check YARN or Spark driver's logs for further information.";
+      } else if (e.getCause() instanceof InterruptedException) {
+        errorMsg = "Interruption occurred while waiting for client to connect.\nPossibly the Spark session is closed " +
+           "such as in case of query cancellation." +
+           "\nPlease refer to HiveServer2 logs for further information.";
      } else {
-        LOG.error("Error while waiting for client to connect.", e);
+        errorMsg = "Error while waiting for client to connect.";
      }
+      LOG.error(errorMsg, e);
      driverThread.interrupt();
      try {
        driverThread.join();
      } catch (InterruptedException ie) {
        // Give up.
-       LOG.debug("Interrupted before driver thread was finished.");
+       LOG.warn("Interrupted before driver thread was finished.", ie);
      }
      throw Throwables.propagate(e);
    }
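
For readers following the first file's change: open() re-reads isOpen inside the catch block because a concurrent close() (for example, on query cancellation) may have flipped the flag while HiveSparkClientFactory.createHiveSparkClient(conf) was still running, and the error message should distinguish the two cases. A minimal standalone sketch of that pattern follows; DemoSession, createClientSlowly(), and the timing sleep are invented for illustration and are not Hive's actual code.

    import java.util.concurrent.TimeUnit;

    // Illustrative sketch of the open()/close() race handled in the first hunk.
    // All names here are hypothetical, not Hive's real SparkSessionImpl.
    public class DemoSession {
      // volatile so a close() on another thread is visible to open()'s catch block
      private volatile boolean isOpen;

      public void open() throws Exception {
        isOpen = true;
        try {
          createClientSlowly();            // close() may run while this is in flight
        } catch (Exception e) {
          // Re-read isOpen to tell a genuine failure apart from a concurrent close().
          String msg = isOpen
              ? "Failed to create client for session"
              : "Session was closed before the client was created";
          throw new Exception(msg, e);
        }
      }

      public void close() {
        isOpen = false;                    // changes how an in-flight open() failure reads
      }

      private void createClientSlowly() throws InterruptedException {
        TimeUnit.SECONDS.sleep(5);         // stand-in for slow client bootstrap;
                                           // throws InterruptedException if interrupted
      }

      public static void main(String[] args) throws Exception {
        DemoSession session = new DemoSession();
        Thread opener = new Thread(() -> {
          try {
            session.open();
          } catch (Exception e) {
            System.err.println(e.getMessage() + " (cause: " + e.getCause() + ")");
          }
        });
        opener.start();
        Thread.sleep(100);                 // give open() time to enter the slow call
        session.close();                   // concurrent close while open() is running
        opener.interrupt();                // makes createClientSlowly() fail
        opener.join();                     // prints the "closed before created" message
      }
    }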
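
The second file's change collects the message into a single errorMsg variable, adds an InterruptedException branch so a cancelled session is no longer reported as a generic connect failure, and logs exactly once. The cause-dispatch shape can be seen in isolation in the sketch below; CauseDispatchDemo and its simulated handshake are hypothetical stand-ins, not SparkClientImpl's real RPC registration.

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeoutException;

    // Standalone sketch of dispatching on the cause of a failed Future.get(),
    // mirroring the catch block in the third hunk. All names are illustrative.
    public class CauseDispatchDemo {
      public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        // Simulated handshake that fails the way a connect timeout would.
        Callable<Void> handshake = () -> {
          throw new TimeoutException("client never connected");
        };
        Future<Void> pending = pool.submit(handshake);
        try {
          pending.get();
        } catch (Exception e) {
          String errorMsg;
          // Future.get() wraps the task's failure in ExecutionException,
          // so the interesting exception is the cause, not e itself.
          if (e.getCause() instanceof TimeoutException) {
            errorMsg = "Timed out waiting for client to connect.";
          } else if (e.getCause() instanceof InterruptedException) {
            errorMsg = "Interrupted while waiting for client to connect.";
          } else {
            errorMsg = "Error while waiting for client to connect.";
          }
          // One log call for every branch, as in the patched code.
          System.err.println(errorMsg + " (" + e + ")");
        } finally {
          pool.shutdownNow();
        }
      }
    }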