diff --git a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
new file mode 100644
index 0000000..b4dac4b
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.auth;
+
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.net.ssl.SSLParameters;
+import javax.net.ssl.SSLServerSocket;
+import javax.net.ssl.SSLSocket;
+
+import org.apache.thrift.transport.TSSLTransportFactory;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
+ * given configuration as well as helps with authenticating requests.
+ */
+public class HiveAuthUtils {
+  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthUtils.class);
+
+  public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
+    return new TSocket(host, port, loginTimeout);
+  }
+
+  public static TTransport getSSLSocket(String host, int port, int loginTimeout)
+      throws TTransportException {
+    // The underlying SSLSocket object is bound to host:port with the given SO_TIMEOUT
+    TSocket tSSLSocket = TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
+    return getSSLSocketWithHttps(tSSLSocket);
+  }
+
+  public static TTransport getSSLSocket(String host, int port, int loginTimeout,
+      String trustStorePath, String trustStorePassWord) throws TTransportException {
+    TSSLTransportFactory.TSSLTransportParameters params =
+        new TSSLTransportFactory.TSSLTransportParameters();
+    params.setTrustStore(trustStorePath, trustStorePassWord);
+    params.requireClientAuth(true);
+    // The underlying SSLSocket object is bound to host:port with the given SO_TIMEOUT and
+    // SSLContext created with the given params
+    TSocket tSSLSocket = TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
+    return getSSLSocketWithHttps(tSSLSocket);
+  }
+
+  // Using endpoint identification algorithm as HTTPS enables us to do
+  // CNAMEs/subjectAltName verification
+  private static TSocket getSSLSocketWithHttps(TSocket tSSLSocket) throws TTransportException {
+    SSLSocket sslSocket = (SSLSocket) tSSLSocket.getSocket();
+    SSLParameters sslParams = sslSocket.getSSLParameters();
+    sslParams.setEndpointIdentificationAlgorithm("HTTPS");
+    sslSocket.setSSLParameters(sslParams);
+    return new TSocket(sslSocket);
+  }
+
+  public static TServerSocket getServerSocket(String hiveHost, int portNum)
+      throws TTransportException {
+    InetSocketAddress serverAddress;
+    if (hiveHost == null || hiveHost.isEmpty()) {
+      // Wildcard bind
+      serverAddress = new InetSocketAddress(portNum);
+    } else {
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    }
+    return new TServerSocket(serverAddress);
+  }
+
+  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
+      String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException,
+      UnknownHostException {
+    TSSLTransportFactory.TSSLTransportParameters params =
+        new TSSLTransportFactory.TSSLTransportParameters();
+    params.setKeyStore(keyStorePath, keyStorePassWord);
+    InetSocketAddress serverAddress;
+    if (hiveHost == null || hiveHost.isEmpty()) {
+      // Wildcard bind
+      serverAddress = new InetSocketAddress(portNum);
+    } else {
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    }
+    TServerSocket thriftServerSocket =
+        TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params);
+    if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
+      List<String> sslVersionBlacklistLocal = new ArrayList<String>();
+      for (String sslVersion : sslVersionBlacklist) {
+        sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase());
+      }
+      SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
+      List<String> enabledProtocols = new ArrayList<String>();
+      for (String protocol : sslServerSocket.getEnabledProtocols()) {
+        if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) {
+          LOG.debug("Disabling SSL Protocol: " + protocol);
+        } else {
+          enabledProtocols.add(protocol);
+        }
+      }
+      sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
+      LOG.info("SSL Server Socket Enabled Protocols: "
+          + Arrays.toString(sslServerSocket.getEnabledProtocols()));
+    }
+    return thriftServerSocket;
+  }
+}
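For reviewers, here is a rough client-side sketch of how the new helper is meant to be called; it is not part of the patch, and the host, port, timeout, and truststore values below are placeholders.

import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
import org.apache.thrift.transport.TTransport;

public class HiveAuthUtilsSslSketch {
  public static void main(String[] args) throws Exception {
    String host = "hms.example.com";   // placeholder endpoint
    int port = 9083;                   // placeholder Thrift port
    int loginTimeout = 20000;          // socket timeout in milliseconds

    // One-way TLS, trusting whatever the default SSLContext trusts.
    TTransport transport = HiveAuthUtils.getSSLSocket(host, port, loginTimeout);
    // The variant with an explicit truststore would be:
    //   HiveAuthUtils.getSSLSocket(host, port, loginTimeout, "/path/to/truststore.jks", "password");

    // The Thrift factory may hand back an already-connected socket, so guard the
    // open() call the same way HiveConnection does.
    if (!transport.isOpen()) {
      transport.open();
    }
    try {
      // hand the transport to a Thrift protocol / generated client here
    } finally {
      transport.close();
    }
  }
}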
" + + Arrays.toString(sslServerSocket.getEnabledProtocols())); + } + return thriftServerSocket; + } +} diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 6f168b5..15de10b 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -904,6 +904,18 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal METASTORE_INIT_METADATA_COUNT_ENABLED("hive.metastore.initial.metadata.count.enabled", true, "Enable a metadata count at metastore startup for metrics."), + // Metastore SSL settings + HIVE_METASTORE_USE_SSL("hive.metastore.use.SSL", false, + "Set this to true for using SSL encryption in HMS server."), + HIVE_METASTORE_SSL_KEYSTORE_PATH("hive.metastore.keystore.path", "", + "Metastore SSL certificate keystore location."), + HIVE_METASTORE_SSL_KEYSTORE_PASSWORD("hive.metastore.keystore.password", "", + "Metastore SSL certificate keystore password."), + HIVE_METASTORE_SSL_TRUSTSTORE_PATH("hive.metastore.truststore.path", "", + "Metastore SSL certificate keystore location."), + HIVE_METASTORE_SSL_TRUSTSTORE_PASSWORD("hive.metastore.truststore.password", "", + "Metastore SSL certificate keystore password."), + // Parameters for exporting metadata on table drop (requires the use of the) // org.apache.hadoop.hive.ql.parse.MetaDataExportListener preevent listener METADATA_EXPORT_LOCATION("hive.metadata.export.location", "", diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index ce85320..d6cf744 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -19,6 +19,7 @@ package org.apache.hive.jdbc; import org.apache.commons.lang.StringUtils; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; @@ -487,14 +488,14 @@ private TTransport createUnderlyingTransport() throws TTransportException { JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD); if (sslTrustStore == null || sslTrustStore.isEmpty()) { - transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout); + transport = HiveAuthUtils.getSSLSocket(host, port, loginTimeout); } else { - transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout, + transport = HiveAuthUtils.getSSLSocket(host, port, loginTimeout, sslTrustStore, sslTrustStorePassword); } } else { // get non-SSL socket transport - transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout); + transport = HiveAuthUtils.getSocketTransport(host, port, loginTimeout); } return transport; } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 530d2f4..ab7932e 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.common.JvmPauseMonitor; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.StatsSetupConst; +import org.apache.hadoop.hive.common.auth.HiveAuthUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import 
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 530d2f4..ab7932e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -37,6 +37,7 @@
 import org.apache.hadoop.hive.common.JvmPauseMonitor;
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
@@ -111,7 +112,6 @@
 import org.apache.thrift.server.TThreadPoolServer;
 import org.apache.thrift.transport.TFramedTransport;
 import org.apache.thrift.transport.TServerSocket;
-import org.apache.thrift.transport.TServerTransport;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportFactory;
 import org.slf4j.Logger;
@@ -6751,9 +6751,9 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
       boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.METASTORE_TCP_KEEP_ALIVE);
       boolean useFramedTransport = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_FRAMED_TRANSPORT);
       boolean useCompactProtocol = conf.getBoolVar(ConfVars.METASTORE_USE_THRIFT_COMPACT_PROTOCOL);
+      boolean useSSL = conf.getBoolVar(ConfVars.HIVE_METASTORE_USE_SSL);
       useSasl = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL);
-
       TProcessor processor;
       TTransportFactory transFactory;
       final TProtocolFactory protocolFactory;
@@ -6804,10 +6804,31 @@
         }
       }
 
-      TServerTransport serverTransport = tcpKeepAlive ?
-          new TServerSocketKeepAlive(port) : new TServerSocket(port);
+      // enable SSL support for HMS
+      TServerSocket serverSocket = null;
+      List<String> sslVersionBlacklist = new ArrayList<String>();
+      for (String sslVersion : conf.getVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST).split(",")) {
+        sslVersionBlacklist.add(sslVersion);
+      }
+      if (!useSSL) {
+        serverSocket = HiveAuthUtils.getServerSocket(null, port);
+      } else {
+        String keyStorePath = conf.getVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH).trim();
+        if (keyStorePath.isEmpty()) {
+          throw new IllegalArgumentException(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH.varname
+              + " Not configured for SSL connection");
+        }
+        String keyStorePassword = ShimLoader.getHadoopShims().getPassword(conf,
+            HiveConf.ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD.varname);
+        serverSocket = HiveAuthUtils.getServerSSLSocket(null, port, keyStorePath,
+            keyStorePassword, sslVersionBlacklist);
+      }
+
+      if (tcpKeepAlive) {
+        serverSocket = new TServerSocketKeepAlive(serverSocket);
+      }
 
-      TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverTransport)
+      TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverSocket)
           .processor(processor)
           .transportFactory(transFactory)
           .protocolFactory(protocolFactory)
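To show how the new knobs above would be exercised, here is a minimal, hedged sketch of setting them programmatically before an embedded startMetaStore() call; the keystore path and password are placeholders, and a production setup would normally resolve the password through the Hadoop shims/credential provider path used above rather than plain text.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class MetastoreSslConfSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    conf.setBoolVar(ConfVars.HIVE_METASTORE_USE_SSL, true);                      // turn TLS on
    conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PATH, "/path/to/hms.jks");  // placeholder
    conf.setVar(ConfVars.HIVE_METASTORE_SSL_KEYSTORE_PASSWORD, "keystorePass");  // placeholder
    // The server-socket code above also honors the shared protocol blacklist.
    conf.setVar(ConfVars.HIVE_SSL_PROTOCOL_BLACKLIST, "SSLv2,SSLv3");

    System.out.println("HMS SSL enabled: " + conf.getBoolVar(ConfVars.HIVE_METASTORE_USE_SSL));
  }
}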
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index c32486f..a6cc1c9 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -21,6 +21,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.common.ValidTxnList;
+import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
@@ -139,6 +140,7 @@
 import org.apache.thrift.protocol.TCompactProtocol;
 import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TFramedTransport;
+import org.apache.thrift.transport.TSSLTransportFactory;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
@@ -425,6 +427,7 @@ public void renamePartition(final String dbname, final String name, final List
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
-  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
-      String keyStorePassWord, List<String> sslVersionBlacklist) throws TTransportException,
-      UnknownHostException {
-    TSSLTransportFactory.TSSLTransportParameters params =
-        new TSSLTransportFactory.TSSLTransportParameters();
-    params.setKeyStore(keyStorePath, keyStorePassWord);
-    InetSocketAddress serverAddress;
-    if (hiveHost == null || hiveHost.isEmpty()) {
-      // Wildcard bind
-      serverAddress = new InetSocketAddress(portNum);
-    } else {
-      serverAddress = new InetSocketAddress(hiveHost, portNum);
-    }
-    TServerSocket thriftServerSocket =
-        TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params);
-    if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) {
-      List<String> sslVersionBlacklistLocal = new ArrayList<String>();
-      for (String sslVersion : sslVersionBlacklist) {
-        sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase());
-      }
-      SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket();
-      List<String> enabledProtocols = new ArrayList<String>();
-      for (String protocol : sslServerSocket.getEnabledProtocols()) {
-        if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) {
-          LOG.debug("Disabling SSL Protocol: " + protocol);
-        } else {
-          enabledProtocols.add(protocol);
-        }
-      }
-      sslServerSocket.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
-      LOG.info("SSL Server Socket Enabled Protocols: "
-          + Arrays.toString(sslServerSocket.getEnabledProtocols()));
-    }
-    return thriftServerSocket;
-  }
-
   // retrieve delegation token for the given user
   public String getDelegationToken(String owner, String renewer, String remoteAddr)
       throws HiveSQLException {
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
index d9c7b2e..94613d8 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
@@ -24,6 +24,7 @@
 import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hive.common.auth.HiveAuthUtils;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.common.metrics.common.MetricsFactory;
@@ -74,7 +75,7 @@ public void run() {
         sslVersionBlacklist.add(sslVersion);
       }
       if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
-        serverSocket = HiveAuthFactory.getServerSocket(hiveHost, portNum);
+        serverSocket = HiveAuthUtils.getServerSocket(hiveHost, portNum);
       } else {
         String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
         if (keyStorePath.isEmpty()) {
@@ -83,7 +84,7 @@
         }
         String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
             HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
-        serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum, keyStorePath,
+        serverSocket = HiveAuthUtils.getServerSSLSocket(hiveHost, portNum, keyStorePath,
             keyStorePassword, sslVersionBlacklist);
       }
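Finally, since the protocol-blacklist filtering is the part of the moved server-socket code most likely to need review, here is a standalone sketch (not from the patch) of the same case-insensitive filtering that HiveAuthUtils.getServerSSLSocket applies, run against a plain JSSE server socket; the blacklist values below are just example inputs in the comma-separated style the configuration expects.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.SSLServerSocketFactory;

public class ProtocolBlacklistSketch {
  public static void main(String[] args) throws Exception {
    List<String> blacklist = Arrays.asList("SSLv2", "SSLv3");   // example blacklist entries

    SSLServerSocket serverSocket =
        (SSLServerSocket) SSLServerSocketFactory.getDefault().createServerSocket(0);
    List<String> enabled = new ArrayList<String>();
    for (String protocol : serverSocket.getEnabledProtocols()) {
      boolean blacklisted = false;
      for (String bad : blacklist) {
        if (bad.trim().equalsIgnoreCase(protocol)) {   // same case-insensitive match as the patch
          blacklisted = true;
          break;
        }
      }
      if (!blacklisted) {
        enabled.add(protocol);
      }
    }
    serverSocket.setEnabledProtocols(enabled.toArray(new String[0]));
    System.out.println("Enabled protocols: " + Arrays.toString(serverSocket.getEnabledProtocols()));
    serverSocket.close();
  }
}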