diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 80b1e98..1d05fe1 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -7081,10 +7081,9 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
             ServerMode.METASTORE);
         saslServer.setSecretManager(delegationTokenManager.getSecretManager());
         transFactory = saslServer.createTransportFactory(
-                MetaStoreUtils.getMetaStoreSaslProperties(conf));
+                MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
         processor = saslServer.wrapProcessor(
             new ThriftHiveMetastore.Processor<IHMSHandler>(handler));
-        serverSocket = HiveAuthUtils.getServerSocket(null, port);
 
         LOG.info("Starting DB backed MetaStore Server in Secure Mode");
       } else {
@@ -7103,25 +7102,27 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
           processor = new TSetIpAddressProcessor<IHMSHandler>(handler);
           LOG.info("Starting DB backed MetaStore Server");
         }
+      }
+
+      if (!useSSL) {
+        serverSocket = HiveAuthUtils.getServerSocket(null, port);
+      } else {
+        String keyStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
+        if (keyStorePath.isEmpty()) {
+          throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH.varname
+              + " Not configured for SSL connection");
+        }
+        String keyStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
+            HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
 
         // enable SSL support for HMS
         List<String> sslVersionBlacklist = new ArrayList<String>();
         for (String sslVersion : conf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
           sslVersionBlacklist.add(sslVersion);
         }
-        if (!useSSL) {
-          serverSocket = HiveAuthUtils.getServerSocket(null, port);
-        } else {
-          String keyStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
-          if (keyStorePath.isEmpty()) {
-            throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname
-                + " Not configured for SSL connection");
-          }
-          String keyStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
-              HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
-          serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath,
-              keyStorePassword, sslVersionBlacklist);
-        }
+
+        serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath,
+            keyStorePassword, sslVersionBlacklist);
       }
 
       if (tcpKeepAlive) {
@@ -7183,6 +7184,7 @@ public void processContext(ServerContext serverContext, TTransport tTransport, TProtocol tProtocol) {
       HMSHandler.LOG.info("Options.maxWorkerThreads = "
           + maxWorkerThreads);
       HMSHandler.LOG.info("TCP keepalive = " + tcpKeepAlive);
+      HMSHandler.LOG.info("Enable SSL = " + useSSL);
 
       if (startLock != null) {
         signalOtherThreadsToStart(tServer, startLock, startCondition, startedServing);
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 7002620..dcb14e8 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -392,6 +392,29 @@ private void open() throws MetaException {
         LOG.info("Trying to connect to metastore with URI " + store);
 
         try {
+          if (useSSL) {
+            try {
+              String trustStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH).trim();
+              if (trustStorePath.isEmpty()) {
+                throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH.varname
+                    + " Not configured for SSL connection");
+              }
+              String trustStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
+                  HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD.varname);
+
+              // Create an SSL socket and connect
+              transport = HiveAuthUtils.getSSLSocket(store.getHost(), store.getPort(),
+                  clientSocketTimeout, trustStorePath, trustStorePassword );
+              LOG.info("Opened an SSL connection to metastore, current connections: " + connCount.incrementAndGet());
+            } catch(IOException e) {
+              throw new IllegalArgumentException(e);
+            } catch(TTransportException e) {
+              tte = e;
+              throw new MetaException(e.toString());
+            }
+          } else {
+            transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
+          }
+
           if (useSasl) {
             // Wrap thrift connection with SASL for secure connection.
             try {
@@ -406,48 +429,24 @@ private void open() throws MetaException {
               String tokenSig = conf.getVar(ConfVars.METASTORE_TOKEN_SIGNATURE);
               // tokenSig could be null
               tokenStrForm = Utils.getTokenStrForm(tokenSig);
-              transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
+
               if(tokenStrForm != null) {
                 // authenticate using delegation tokens via the "DIGEST" mechanism
                 transport = authBridge.createClientTransport(null, store.getHost(),
                     "DIGEST", tokenStrForm, transport,
-                        MetaStoreUtils.getMetaStoreSaslProperties(conf));
+                        MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
               } else {
                 String principalConfig =
                     conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL);
                 transport = authBridge.createClientTransport(
                     principalConfig, store.getHost(), "KERBEROS", null,
-                    transport, MetaStoreUtils.getMetaStoreSaslProperties(conf));
+                    transport, MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
               }
             } catch (IOException ioe) {
               LOG.error("Couldn't create client transport", ioe);
               throw new MetaException(ioe.toString());
             }
           } else {
-            if (useSSL) {
-              try {
-                String trustStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH).trim();
-                if (trustStorePath.isEmpty()) {
-                  throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PATH.varname
-                      + " Not configured for SSL connection");
-                }
-                String trustStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
-                    HiveConf.ConfVars.HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD.varname);
-
-                // Create an SSL socket and connect
-                transport = HiveAuthUtils.getSSLSocket(store.getHost(), store.getPort(), clientSocketTimeout, trustStorePath, trustStorePassword );
-                LOG.info("Opened an SSL connection to metastore, current connections: " + connCount.incrementAndGet());
-              } catch(IOException e) {
-                throw new IllegalArgumentException(e);
-              } catch(TTransportException e) {
-                tte = e;
-                throw new MetaException(e.toString());
-              }
-            } else {
-              transport = new TSocket(store.getHost(), store.getPort(), clientSocketTimeout);
-            }
-
             if (useFramedTransport) {
               transport = new TFramedTransport(transport);
             }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 79f6d7f..1b701e0 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -47,6 +47,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -86,6 +87,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
+import org.apache.hadoop.security.SaslRpcServer;
 
 import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.ReflectionUtil;
@@ -1761,8 +1763,19 @@ public static boolean compareFieldColumns(List<FieldSchema> schema1, List<FieldS
 
   /**
    * Get the SASL properties to be used by the metastore.
    */
-  public static Map<String, String> getMetaStoreSaslProperties(HiveConf conf) {
+  public static Map<String, String> getMetaStoreSaslProperties(HiveConf conf, boolean useSSL) {
     // As of now Hive Meta Store uses the same configuration as Hadoop SASL configuration
+
+    // If SSL is enabled, override the given value of "hadoop.rpc.protection" and set it to "authentication"
+    // This disables any encryption provided by SASL, since SSL already provides it
+    String hadoopRpcProtectionVal = conf.get(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION);
+    String hadoopRpcProtectionAuth = SaslRpcServer.QualityOfProtection.AUTHENTICATION.toString();
+
+    if (useSSL && hadoopRpcProtectionVal != null &&
+        !hadoopRpcProtectionVal.equals(hadoopRpcProtectionAuth)) {
+      LOG.warn("Overriding value of " + CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION +
+          " setting it from " + hadoopRpcProtectionVal + " to " + hadoopRpcProtectionAuth +
+          " because SSL is enabled");
+      conf.set(CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION, hadoopRpcProtectionAuth);
+    }
     return ShimLoader.getHadoopThriftAuthBridge().getHadoopSaslProperties(conf);
   }