package org.apache.hive.jdbc;

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.Service;
import org.apache.hive.service.rpc.thrift.TCLIService;
import org.apache.hive.service.rpc.thrift.TCLIService.Iface;

/**
 * Reflective bridge that lets the JDBC driver create an embedded
 * {@code EmbeddedThriftBinaryCLIService} without a compile-time dependency on the
 * hive-service module. The service class is looked up by name at runtime; if the
 * hive-service jar is absent from the classpath, embedded mode fails with a clear
 * error instead of a NoClassDefFoundError at class-load time.
 */
public class EmbeddedCLIServicePortal {

  private EmbeddedCLIServicePortal() {
    // Utility class: static factory only, no instances.
  }

  /**
   * Creates and initializes an embedded Thrift CLI service client.
   *
   * @param hiveConfs session-specific configuration overrides applied on top of a
   *        freshly created {@link HiveConf}; may be {@code null} or empty
   * @return an initialized {@link TCLIService.Iface} backed by the embedded service
   * @throws RuntimeException if the hive-service classes are not on the classpath,
   *         or if instantiation/initialization of the embedded service fails
   */
  public static Iface get(Map<String, String> hiveConfs) {
    TCLIService.Iface embeddedClient;
    try {
      // Look the implementation up by name so the jdbc module keeps no
      // compile-time dependency on hive-service. asSubclass() gives a typed
      // Class and fails fast (ClassCastException -> wrapped below) if the
      // class is not a Service.
      Class<? extends Service> clazz =
          Class.forName("org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService")
              .asSubclass(Service.class);
      // getDeclaredConstructor().newInstance() replaces the deprecated
      // Class.newInstance(), which rethrows checked constructor exceptions
      // undeclared.
      Service service = clazz.getDeclaredConstructor().newInstance();
      service.init(buildOverlayedConf(hiveConfs));
      // The embedded service implements both Service and TCLIService.Iface.
      embeddedClient = (Iface) service;
    } catch (ClassNotFoundException e) {
      // Preserve the cause so the missing-class detail is not lost.
      throw new RuntimeException(
          "Please Load hive-service jar to the classpath to enable embedded mode", e);
    } catch (Exception e) {
      throw new RuntimeException("Error initializing embedded mode", e);
    }
    return embeddedClient;
  }

  /**
   * Builds a {@link HiveConf} with the given statement-specific settings applied
   * on top of the defaults.
   *
   * @param confOverlay settings to overlay; may be {@code null} or empty
   * @return a new {@link HiveConf} with the overlay applied
   * @throws RuntimeException if a setting is rejected (e.g. a restricted key)
   */
  private static HiveConf buildOverlayedConf(Map<String, String> confOverlay) {
    HiveConf conf = new HiveConf();
    if (confOverlay != null && !confOverlay.isEmpty()) {
      // Apply overlay query-specific settings, if any.
      for (Map.Entry<String, String> confEntry : confOverlay.entrySet()) {
        try {
          conf.set(confEntry.getKey(), confEntry.getValue());
        } catch (IllegalArgumentException e) {
          throw new RuntimeException("Error applying statement specific settings", e);
        }
      }
    }
    return conf;
  }

}
org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; @@ -29,14 +28,12 @@ import org.apache.hive.service.rpc.thrift.TSetClientInfoReq; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hive.common.auth.HiveAuthUtils; -import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthConstants; import org.apache.hive.service.auth.KerberosSaslHelper; import org.apache.hive.service.auth.PlainSaslHelper; import org.apache.hive.service.auth.SaslQOP; import org.apache.hive.service.cli.session.SessionUtils; -import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.hive.service.rpc.thrift.TCancelDelegationTokenReq; import org.apache.hive.service.rpc.thrift.TCancelDelegationTokenResp; @@ -168,7 +165,7 @@ } return ZooKeeperHiveClientHelper.getDirectParamsList(params); } - + public static List getAllUrlStrings(String zookeeperBasedHS2Url) throws Exception { List jdbcUrls = new ArrayList<>(); List allConnectionParams = getAllUrls(zookeeperBasedHS2Url); @@ -182,7 +179,7 @@ } private static String makeDirectJDBCUrlFromConnectionParams(JdbcConnectionParams cp) { - // Direct JDBC Url format: + // Direct JDBC Url format: // jdbc:hive2://:/dbName;sess_var_list?hive_conf_list#hive_var_list StringBuilder url = new StringBuilder(""); if (cp != null) { @@ -273,7 +270,9 @@ wmPool = sessConfMap.get(JdbcConnectionParams.WM_POOL); for (String application : JdbcConnectionParams.APPLICATION) { wmApp = sessConfMap.get(application); - if (wmApp != null) break; + if (wmApp != null) { + break; + } } // add supported protocols @@ -289,9 +288,7 @@ supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V10); if (isEmbeddedMode) { - 
EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService(); - embeddedClient.init(null, connParams.getHiveConfs()); - client = embeddedClient; + client = EmbeddedCLIServicePortal.get(connParams.getHiveConfs()); connParams.getHiveConfs().clear(); // open client session openSession(); @@ -1201,7 +1198,9 @@ @Override public String getClientInfo(String name) throws SQLException { - if (clientInfo == null) return null; + if (clientInfo == null) { + return null; + } return clientInfo.getProperty(name); } @@ -1505,8 +1504,11 @@ if (!autoCommit) { LOG.warn("Request to set autoCommit to false; Hive does not support autoCommit=false."); SQLWarning warning = new SQLWarning("Hive does not support autoCommit=false"); - if (warningChain == null) warningChain = warning; - else warningChain.setNextWarning(warning); + if (warningChain == null) { + warningChain = warning; + } else { + warningChain.setNextWarning(warning); + } } } @@ -1559,7 +1561,9 @@ Map map = new HashMap<>(); if (clientInfo != null) { for (Entry e : clientInfo.entrySet()) { - if (e.getKey() == null || e.getValue() == null) continue; + if (e.getKey() == null || e.getValue() == null) { + continue; + } map.put(e.getKey().toString(), e.getValue().toString()); } } diff --git pom.xml pom.xml index 2947a29..a8b02fd 100644 --- pom.xml +++ pom.xml @@ -1072,7 +1072,7 @@ maven-eclipse-plugin ${maven.eclipse.plugin.version} - true + false true target/eclipse/classes Hive diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index e07cd7e..fe29b65 100644 --- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -41,7 +41,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hive.service.cli.HiveSQLException; -import 
org.apache.hive.service.cli.thrift.ThriftCLIService; +import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.thrift.TProcessorFactory; import org.apache.thrift.transport.TSaslServerTransport; import org.apache.thrift.transport.TTransportException; @@ -175,7 +175,7 @@ * @return * @throws LoginException */ - public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException { + public TProcessorFactory getAuthProcFactory(TCLIService.Iface service) throws LoginException { if (isSASLWithKerberizedHadoop()) { return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service); } else { diff --git service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java index 14d2008..5d067f6 100644 --- service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java +++ service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java @@ -25,7 +25,6 @@ import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge; import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge.Server; import org.apache.hadoop.security.SecurityUtil; -import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.hive.service.rpc.thrift.TCLIService.Iface; import org.apache.thrift.TProcessor; @@ -36,7 +35,7 @@ public final class KerberosSaslHelper { public static TProcessorFactory getKerberosProcessorFactory(Server saslServer, - ThriftCLIService service) { + TCLIService.Iface service) { return new CLIServiceProcessorFactory(saslServer, service); } @@ -108,10 +107,10 @@ private static class CLIServiceProcessorFactory extends TProcessorFactory { - private final ThriftCLIService service; + private final TCLIService.Iface service; private final Server saslServer; - public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) { + public 
CLIServiceProcessorFactory(Server saslServer, TCLIService.Iface service) { super(null); this.service = service; this.saslServer = saslServer; diff --git service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java index 0742311..f2ba6be 100644 --- service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java +++ service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java @@ -34,7 +34,7 @@ import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods; import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider; -import org.apache.hive.service.cli.thrift.ThriftCLIService; +import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.hive.service.rpc.thrift.TCLIService.Iface; import org.apache.thrift.TProcessor; import org.apache.thrift.TProcessorFactory; @@ -49,7 +49,7 @@ public final class PlainSaslHelper { private static final Logger LOG = LoggerFactory.getLogger(PlainSaslHelper.class); - public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService service) { + public static TProcessorFactory getPlainProcessorFactory(TCLIService.Iface service) { return new SQLPlainProcessorFactory(service); } @@ -193,9 +193,9 @@ private static final class SQLPlainProcessorFactory extends TProcessorFactory { - private final ThriftCLIService service; + private final TCLIService.Iface service; - SQLPlainProcessorFactory(ThriftCLIService service) { + SQLPlainProcessorFactory(TCLIService.Iface service) { super(null); this.service = service; } diff --git service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java index 7ab7aee..a7d0de0 100644 --- service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java +++ service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java @@ -18,8 +18,6 @@ 
package org.apache.hive.service.cli.thrift; -import java.util.Map; - import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.service.cli.CLIService; import org.apache.hive.service.cli.ICLIService; @@ -40,26 +38,6 @@ @Override public synchronized void init(HiveConf hiveConf) { - init(hiveConf, null); - } - - public synchronized void init(HiveConf hiveConf, Map confOverlay) { - // Null HiveConf is passed in jdbc driver side code since driver side is supposed to be - // independent of conf object. Create new HiveConf object here in this case. - if (hiveConf == null) { - hiveConf = new HiveConf(); - } - // Set the specific parameters if needed - if (confOverlay != null && !confOverlay.isEmpty()) { - // apply overlay query specific settings, if any - for (Map.Entry confEntry : confOverlay.entrySet()) { - try { - hiveConf.set(confEntry.getKey(), confEntry.getValue()); - } catch (IllegalArgumentException e) { - throw new RuntimeException("Error applying statement specific settings", e); - } - } - } cliService.init(hiveConf); cliService.start(); super.init(hiveConf);