diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
index 8224ef9..71783d3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionImpl.java
@@ -80,9 +80,14 @@ public SparkJobRef submit(DriverContext driverContext, SparkWork sparkWork) thro
   public ObjectPair getMemoryAndCores() throws Exception {
     SparkConf sparkConf = hiveSparkClient.getSparkConf();
     int numExecutors = hiveSparkClient.getExecutorCount();
-    // at start-up, we may be unable to get number of executors
+    // at start-up, we may be unable to get the number of executors; use the configuration
+    // values in this case
     if (numExecutors <= 0) {
-      return new ObjectPair(-1L, -1);
+      numExecutors = sparkConf.getInt("spark.executor.instances", -1);
+      if (numExecutors == -1) {
+        // if we do not have any information, return immediately
+        return new ObjectPair(-1L, -1);
+      }
     }
     int executorMemoryInMB = Utils.memoryStringToMb(
         sparkConf.get("spark.executor.memory", "512m"));
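
For context, a minimal, self-contained sketch of the fallback this patch introduces: prefer the live executor count, fall back to the static spark.executor.instances setting, and return the -1 sentinel only when neither is available. This is not Hive code; the class and method names below are invented for illustration, and only the stock SparkConf API (getInt with a default, set, and the SparkConf(boolean) constructor) is assumed.

import org.apache.spark.SparkConf;

// Hypothetical illustration of the patch's fallback logic; not part of Hive.
public class ExecutorCountFallback {

  // Returns the executor count, or -1 if it cannot be determined.
  static int resolveExecutorCount(int liveCount, SparkConf conf) {
    if (liveCount > 0) {
      return liveCount; // a positive runtime count is authoritative
    }
    // At start-up executors may not have registered yet; fall back to config.
    return conf.getInt("spark.executor.instances", -1);
  }

  public static void main(String[] args) {
    // SparkConf(false) avoids picking up spark.* system properties.
    SparkConf conf = new SparkConf(false).set("spark.executor.instances", "4");
    System.out.println(resolveExecutorCount(0, conf));                  // 4: config fallback
    System.out.println(resolveExecutorCount(8, conf));                  // 8: live count wins
    System.out.println(resolveExecutorCount(0, new SparkConf(false)));  // -1: unknown
  }
}

The patch keeps the same shape: the early return with ObjectPair(-1L, -1) now fires only after the configuration lookup also comes back empty, so callers that previously saw the sentinel at start-up can instead compute memory and cores from the configured values.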