diff --git a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
index 262eaa2..2cebe72 100644
--- a/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
+++ b/beeline/src/java/org/apache/hive/beeline/HiveSchemaTool.java
@@ -1558,6 +1558,6 @@ public static void main(String[] args) {
       System.exit(1);
     }
     System.out.println("schemaTool completed");
-
+    System.exit(0);
   }
 }
diff --git a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
index a1fef29..fd12b64 100644
--- a/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
+++ b/common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
@@ -26,6 +26,8 @@
 import org.apache.hadoop.fs.FileSystem;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This is just a wrapper around hadoop's ShutdownHookManager but also manages delete on exit hook for temp files.
@@ -36,6 +38,8 @@
 
   private static final DeleteOnExitHook DELETE_ON_EXIT_HOOK = new DeleteOnExitHook();
 
+  static final private Logger LOG = LoggerFactory.getLogger(ShutdownHookManager.class.getName());
+
   static {
     MGR.addShutdownHook(DELETE_ON_EXIT_HOOK, -1);
   }
@@ -93,7 +97,7 @@ public static boolean removeShutdownHook(Runnable shutdownHook) {
    */
   public static void deleteOnExit(File file) {
     if (MGR.isShutdownInProgress()) {
-      throw new IllegalStateException("Shutdown in progress, cannot add a deleteOnExit");
+      LOG.warn("Shutdown in progress, cannot add a deleteOnExit");
     }
     DELETE_ON_EXIT_HOOK.deleteTargets.add(file);
   }
@@ -103,7 +107,7 @@ public static void deleteOnExit(File file) {
    */
   public static void cancelDeleteOnExit(File file) {
     if (MGR.isShutdownInProgress()) {
-      throw new IllegalStateException("Shutdown in progress, cannot cancel a deleteOnExit");
+      LOG.warn("Shutdown in progress, cannot cancel a deleteOnExit");
     }
     DELETE_ON_EXIT_HOOK.deleteTargets.remove(file);
   }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 30b6daf..458158e 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -202,9 +202,9 @@ public HiveConnection(String uri, Properties info) throws SQLException {
     if (isEmbeddedMode) {
       EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
-      embeddedClient.init(null);
+      embeddedClient.init(null, connParams.getHiveConfs());
       client = embeddedClient;
-
+      connParams.getHiveConfs().clear();
       // open client session
       openSession();
       executeInitSql();
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
index accba80..8b61874 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java
@@ -18,6 +18,8 @@
 
 package org.apache.hive.service.cli.thrift;
 
+import java.util.Map;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.ICLIService;
@@ -37,11 +39,26 @@ public EmbeddedThriftBinaryCLIService() {
 
   @Override
   public synchronized void init(HiveConf hiveConf) {
-    // Null HiveConf is passed in jdbc driver side code since driver side is supposed to be
-    // independent of conf object. Create new HiveConf object here in this case.
-    if (hiveConf == null) {
-      hiveConf = new HiveConf();
-    }
+    init(hiveConf, null);
+  }
+
+  public synchronized void init(HiveConf hiveConf, Map<String, String> confOverlay) {
+    // Null HiveConf is passed in jdbc driver side code since driver side is supposed to be
+    // independent of conf object. Create new HiveConf object here in this case.
+    if (hiveConf == null) {
+      hiveConf = new HiveConf();
+    }
+    // Set the specific parameters if needed
+    if (confOverlay != null && !confOverlay.isEmpty()) {
+      // apply overlay query specific settings, if any
+      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
+        try {
+          hiveConf.set(confEntry.getKey(), confEntry.getValue());
+        } catch (IllegalArgumentException e) {
+          throw new RuntimeException("Error applying statement specific settings", e);
+        }
+      }
+    }
     cliService.init(hiveConf);
     cliService.start();
     super.init(hiveConf);
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
index 70746e8..1dd97f2 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/tools/HiveSchemaHelper.java
@@ -44,9 +44,11 @@
   public static final String DB_MYSQL = "mysql";
   public static final String DB_POSTGRACE = "postgres";
   public static final String DB_ORACLE = "oracle";
-  public static final String EMBEDDED_HS2_URL = "jdbc:hive2://";
+  public static final String EMBEDDED_HS2_URL =
+      "jdbc:hive2://?hive.conf.restricted.list=;hive.security.authorization.sqlstd.confwhitelist=*;" +
+      "hive.security.authorization.sqlstd.confwhitelist.append=*;hive.security.authorization.enabled=false;" +
+      "hive.metastore.uris=";
   public static final String HIVE_JDBC_DRIVER = "org.apache.hive.jdbc.HiveDriver";
-
   /***
    * Get JDBC connection to metastore db