diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index d52f994..a1616df 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2789,7 +2789,9 @@ public void verifyAndSet(String name, String value) throws IllegalArgumentExcept
       // When either name or value is null, the set method below will fail,
       // and throw IllegalArgumentException
       set(name, value);
-      isSparkConfigUpdated = isSparkRelatedConfig(name);
+      if (isSparkRelatedConfig(name)) {
+        isSparkConfigUpdated = true;
+      }
     }
   }
 
diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index 3b7a525..cd472c7 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -142,4 +142,18 @@ public void testHiddenConfig() throws Exception {
     Assert.assertEquals("", conf2.get(HiveConf.ConfVars.METASTOREPWD.varname));
     Assert.assertEquals("", conf2.get(HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname));
   }
+
+  @Test
+  public void testSparkConfigUpdate() {
+    HiveConf conf = new HiveConf();
+    Assert.assertFalse(conf.getSparkConfigUpdated());
+
+    conf.verifyAndSet("spark.master", "yarn-cluster");
+    Assert.assertTrue(conf.getSparkConfigUpdated());
+    conf.verifyAndSet("hive.execution.engine", "spark");
+    Assert.assertTrue("Expected spark config updated.", conf.getSparkConfigUpdated());
+
+    conf.setSparkConfigUpdated(false);
+    Assert.assertFalse(conf.getSparkConfigUpdated());
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
index 0268469..a61cdc5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkUtilities.java
@@ -121,12 +121,16 @@ public static boolean isDedicatedCluster(Configuration conf) {
   public static SparkSession getSparkSession(HiveConf conf,
       SparkSessionManager sparkSessionManager) throws HiveException {
     SparkSession sparkSession = SessionState.get().getSparkSession();
+    HiveConf sessionConf = SessionState.get().getConf();
 
     // Spark configurations are updated close the existing session
-    if (conf.getSparkConfigUpdated()) {
+    // In case of async queries or confOverlay is not empty,
+    // sessionConf and conf are different objects
+    if (sessionConf.getSparkConfigUpdated() || conf.getSparkConfigUpdated()) {
       sparkSessionManager.closeSession(sparkSession);
       sparkSession = null;
       conf.setSparkConfigUpdated(false);
+      sessionConf.setSparkConfigUpdated(false);
     }
     sparkSession = sparkSessionManager.getSession(sparkSession, conf, true);
     SessionState.get().setSparkSession(sparkSession);
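
Reviewer note, not part of the patch: the HiveConf change makes isSparkConfigUpdated sticky. verifyAndSet now only ever sets the flag to true, so setting an unrelated property afterwards can no longer overwrite a pending Spark session restart with false; the flag is cleared only by an explicit setSparkConfigUpdated(false), as SparkUtilities.getSparkSession does above. A minimal sketch of that lifecycle, assuming hive.exec.parallel is not treated as Spark-related (that property name is illustrative only, not part of the patch):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class SparkConfigFlagSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Spark-related property: the flag is switched on.
        conf.verifyAndSet("spark.master", "yarn-cluster");
        // Unrelated property: with this patch the flag is left untouched;
        // before the patch this assignment reset it to false.
        conf.verifyAndSet("hive.exec.parallel", "true");
        System.out.println(conf.getSparkConfigUpdated());  // expected: true
        // Only an explicit reset, as in SparkUtilities.getSparkSession, clears it.
        conf.setSparkConfigUpdated(false);
        System.out.println(conf.getSparkConfigUpdated());  // expected: false
      }
    }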