diff --git a/bin/beeline b/bin/beeline
index d247c39..bceb7b9 100644
--- a/bin/beeline
+++ b/bin/beeline
@@ -18,4 +18,8 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+# Set Hadoop User classpath to true so that httpclient jars are taken from
+# hive lib instead of hadoop lib.
+export HADOOP_USER_CLASSPATH_FIRST=true
+
 . "$bin"/hive --service beeline "$@"
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
index 30c4e99..28a3777 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
@@ -155,7 +155,7 @@ public void testSSLVersion() throws Exception {
         cause = cause.getCause();
       }
       Assert.assertEquals("org.apache.http.NoHttpResponseException", cause.getClass().getName());
-      Assert.assertEquals("The target server failed to respond", cause.getMessage());
+      Assert.assertTrue(cause.getMessage().contains("failed to respond"));
     }
     miniHS2.stop();
   }
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
index 3e913da..970e904 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
@@ -160,7 +160,7 @@ private static TTransport getHttpTransport() throws Exception {
     String httpUrl = transportMode + "://" + host + ":" + port + "/" +
         thriftHttpPath + "/";
     httpClient.addRequestInterceptor(
-        new HttpBasicAuthInterceptor(USERNAME, PASSWORD));
+        new HttpBasicAuthInterceptor(USERNAME, PASSWORD, null, null));
     return new THttpClient(httpUrl, httpClient);
   }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 764a3f1..ea3b798 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -50,10 +50,11 @@
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.protocol.HttpContext;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.auth.KerberosSaslHelper;
@@ -73,9 +74,17 @@
 import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp;
 import org.apache.hive.service.cli.thrift.TSessionHandle;
 import org.apache.http.HttpRequestInterceptor;
-import org.apache.http.conn.scheme.Scheme;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.ServiceUnavailableRetryStrategy;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
 import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.transport.THttpClient;
@@ -235,7 +244,7 @@ private String getServerHttpUrl(boolean useSsl) {
   }
 
   private TTransport createHttpTransport() throws SQLException, TTransportException {
-    DefaultHttpClient httpClient;
+    CloseableHttpClient httpClient;
     boolean useSsl = isSslConnection();
     // Create an http client from the configs
     httpClient = getHttpClient(useSsl);
@@ -259,35 +268,76 @@ private TTransport createHttpTransport() throws SQLException, TTransportExceptio
     return transport;
   }
 
-  private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException {
-    DefaultHttpClient httpClient = new DefaultHttpClient();
+  private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
+    boolean isCookieEnabled = sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH) == null ||
+        (!JdbcConnectionParams.COOKIE_AUTH_FALSE.equalsIgnoreCase(
+            sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH)));
+    String cookieName = sessConfMap.get(JdbcConnectionParams.COOKIE_NAME) == null ?
+        JdbcConnectionParams.DEFAULT_COOKIE_NAMES_HS2 :
+        sessConfMap.get(JdbcConnectionParams.COOKIE_NAME);
+    CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
+    HttpClientBuilder httpClientBuilder;
     // Request interceptor for any request pre-processing logic
     HttpRequestInterceptor requestInterceptor;
-    // If Kerberos
+
+    // Configure http client for kerberos/password based authentication
     if (isKerberosAuthMode()) {
       /**
        * Add an interceptor which sets the appropriate header in the request.
        * It does the kerberos authentication and get the final service ticket,
        * for sending to the server before every request.
        * In https mode, the entire information is encrypted
-       * TODO: Optimize this with a mix of kerberos + using cookie.
       */
       requestInterceptor =
           new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
-              host, getServerHttpUrl(useSsl), assumeSubject);
+              host, getServerHttpUrl(useSsl), assumeSubject, cookieStore, cookieName);
     } else {
       /**
       * Add an interceptor to pass username/password in the header.
       * In https mode, the entire information is encrypted
       */
-      requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword());
+      requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword(),
+          cookieStore, cookieName);
+    }
+    // Configure http client for cookie based authentication
+    if (isCookieEnabled) {
+      // Create a http client with a retry mechanism when the server returns a status code of 401.
+      httpClientBuilder =
+          HttpClients.custom().setServiceUnavailableRetryStrategy(
+              new ServiceUnavailableRetryStrategy() {
+
+                @Override
+                public boolean retryRequest(
+                    final HttpResponse response,
+                    final int executionCount,
+                    final HttpContext context) {
+                  int statusCode = response.getStatusLine().getStatusCode();
+                  boolean ret = statusCode == 401 && executionCount <= 1;
+
+                  // Set the context attribute to true which will be interpreted by the request interceptor
+                  if (ret) {
+                    context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_TRUE);
+                  }
+                  return ret;
+                }
+
+                @Override
+                public long getRetryInterval() {
+                  // Immediate retry
+                  return 0;
+                }
+              });
+    } else {
+      httpClientBuilder = HttpClientBuilder.create();
     }
-    // Configure httpClient for SSL
+    // Add the request interceptor to the client builder
+    httpClientBuilder.addInterceptorFirst(requestInterceptor);
+    // Configure http client for SSL
     if (useSsl) {
       String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
       String sslTrustStorePassword = sessConfMap.get(
-        JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
+          JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
       KeyStore sslTrustStore;
       SSLSocketFactory socketFactory;
       /**
@@ -311,21 +361,25 @@ private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException {
         // Pick trust store config from the given path
         sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
         sslTrustStore.load(new FileInputStream(sslTrustStorePath),
-          sslTrustStorePassword.toCharArray());
+            sslTrustStorePassword.toCharArray());
         socketFactory = new SSLSocketFactory(sslTrustStore);
       }
       socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
-      Scheme sslScheme = new Scheme("https", 443, socketFactory);
-      httpClient.getConnectionManager().getSchemeRegistry().register(sslScheme);
+
+      final Registry<ConnectionSocketFactory> registry =
+          RegistryBuilder.<ConnectionSocketFactory>create()
+              .register("https", socketFactory)
+              .build();
+
+      httpClientBuilder.setConnectionManager(new BasicHttpClientConnectionManager(registry));
     } catch (Exception e) {
       String msg =  "Could not create an https connection to " +
-        jdbcUriString + ". " + e.getMessage();
+          jdbcUriString + ". " + e.getMessage();
       throw new SQLException(msg, " 08S01", e);
     }
   }
-    httpClient.addRequestInterceptor(requestInterceptor);
-    return httpClient;
+    return httpClientBuilder.build();
   }
 
   /**
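
Note: the getHttpClient() change above combines three HttpClient 4.4 pieces: HttpClients.custom(), a ServiceUnavailableRetryStrategy that fires once on HTTP 401, and a request interceptor (added via addInterceptorFirst) that re-attaches credentials when the retry flag is set. The following is a minimal, self-contained sketch of that pattern, not part of the patch; the class name, context key, header value and the use of setDefaultCookieStore() (the patch instead injects the cookie store per request from its interceptors) are illustrative assumptions.

    import java.io.IOException;

    import org.apache.http.HttpException;
    import org.apache.http.HttpRequest;
    import org.apache.http.HttpRequestInterceptor;
    import org.apache.http.HttpResponse;
    import org.apache.http.client.ServiceUnavailableRetryStrategy;
    import org.apache.http.impl.client.BasicCookieStore;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.protocol.HttpContext;

    public class RetryOn401Sketch {
      // Illustrative context key; the patch uses Utils.HIVE_SERVER2_RETRY_KEY instead.
      static final String RETRY_KEY = "example.retry";

      static CloseableHttpClient buildClient() {
        BasicCookieStore cookieStore = new BasicCookieStore();
        return HttpClients.custom()
            // Replay whatever cookie the server hands back (e.g. hive.server2.auth).
            .setDefaultCookieStore(cookieStore)
            // Retry exactly once on 401 and flag the retry in the per-request context.
            .setServiceUnavailableRetryStrategy(new ServiceUnavailableRetryStrategy() {
              @Override
              public boolean retryRequest(HttpResponse response, int executionCount,
                  HttpContext context) {
                boolean retry = response.getStatusLine().getStatusCode() == 401
                    && executionCount <= 1;
                if (retry) {
                  context.setAttribute(RETRY_KEY, "true");
                }
                return retry;
              }

              @Override
              public long getRetryInterval() {
                return 0; // retry immediately
              }
            })
            // Attach credentials only on the first attempt or after a 401-triggered retry.
            .addInterceptorFirst(new HttpRequestInterceptor() {
              @Override
              public void process(HttpRequest request, HttpContext context)
                  throws HttpException, IOException {
                Object flag = context.getAttribute(RETRY_KEY);
                if (flag == null || "true".equals(flag)) {
                  request.addHeader("Authorization", "Basic placeholder"); // placeholder credential
                }
                context.setAttribute(RETRY_KEY, "false");
              }
            })
            .build();
      }
    }
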
" + e.getMessage(); throw new SQLException(msg, " 08S01", e); } } - httpClient.addRequestInterceptor(requestInterceptor); - return httpClient; + return httpClientBuilder.build(); } /** diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java index dd4f62a..9676651 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java @@ -25,6 +25,8 @@ import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CookieStore; +import org.apache.http.client.protocol.ClientContext; import org.apache.http.impl.auth.AuthSchemeBase; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.protocol.HttpContext; @@ -37,20 +39,42 @@ public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { UsernamePasswordCredentials credentials; AuthSchemeBase authScheme; + CookieStore cookieStore; + boolean isCookieEnabled; + String cookieName; - public HttpBasicAuthInterceptor(String username, String password) { + public HttpBasicAuthInterceptor(String username, String password, CookieStore cookieStore, + String cn) { if(username != null){ credentials = new UsernamePasswordCredentials(username, password); } authScheme = new BasicScheme(); + this.cookieStore = cookieStore; + isCookieEnabled = (cookieStore != null); + cookieName = cn; } @Override public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException { - Header basicAuthHeader = authScheme.authenticate( - credentials, httpRequest, httpContext); - httpRequest.addHeader(basicAuthHeader); + if (isCookieEnabled) { + httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore); + } + // Add the authentication details under the following scenarios: + // 1. Cookie Authentication is disabled OR + // 2. The first time when the request is sent OR + // 3. The server returns a 401, which sometimes means the cookie has expired + if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null && + (cookieStore == null || (cookieStore != null && + Utils.needToSendCredentials(cookieStore, cookieName)))) || + (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null && + httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY). 
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
index 2d21547..8bb4bf6 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
@@ -25,6 +25,8 @@
 import org.apache.http.HttpException;
 import org.apache.http.HttpRequest;
 import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.protocol.ClientContext;
 import org.apache.http.protocol.HttpContext;
 
 /**
@@ -40,31 +42,59 @@
   String host;
   String serverHttpUrl;
   boolean assumeSubject;
+  CookieStore cookieStore;
+  boolean isCookieEnabled;
+  String cookieName;
 
   // A fair reentrant lock
   private static ReentrantLock kerberosLock = new ReentrantLock(true);
 
   public HttpKerberosRequestInterceptor(String principal, String host,
-      String serverHttpUrl, boolean assumeSubject) {
+      String serverHttpUrl, boolean assumeSubject, CookieStore cs, String cn) {
     this.principal = principal;
     this.host = host;
     this.serverHttpUrl = serverHttpUrl;
     this.assumeSubject = assumeSubject;
+    this.cookieStore = cs;
+    isCookieEnabled = (cs != null);
+    cookieName = cn;
   }
 
   @Override
   public void process(HttpRequest httpRequest, HttpContext httpContext)
      throws HttpException, IOException {
    String kerberosAuthHeader;
+
    try {
      // Generate the service ticket for sending to the server.
      // Locking ensures the tokens are unique in case of concurrent requests
      kerberosLock.lock();
-      kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
-          principal, host, serverHttpUrl, assumeSubject);
-      // Set the session key token (Base64 encoded) in the headers
-      httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
-          HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+      // If cookie based authentication is allowed, generate ticket only when necessary.
+      // The necessary condition is either when there are no server side cookies in the
+      // cookiestore which can be sent back or when the server returns a 401 error code
+      // indicating that the previous cookie has expired.
+      if (isCookieEnabled) {
+        httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+      }
+      // Generate the kerberos ticket under the following scenarios:
+      // 1. Cookie Authentication is disabled OR
+      // 2. The first time when the request is sent OR
+      // 3. The server returns a 401, which sometimes means the cookie has expired
+      if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
+          (cookieStore == null || (cookieStore != null &&
+          Utils.needToSendCredentials(cookieStore, cookieName)))) ||
+          (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
+          httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).
+          equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) {
+        kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
+            principal, host, serverHttpUrl, assumeSubject);
+        // Set the session key token (Base64 encoded) in the headers
+        httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
+            HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+      }
+      if (isCookieEnabled) {
+        httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE);
+      }
     } catch (Exception e) {
       throw new HttpException(e.getMessage(), e);
     }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
index a27a532..791ecc7 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -34,6 +34,8 @@
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.thrift.TStatus;
 import org.apache.hive.service.cli.thrift.TStatusCode;
+import org.apache.http.client.CookieStore;
+import org.apache.http.cookie.Cookie;
 
 public class Utils {
   public static final Log LOG = LogFactory.getLog(Utils.class.getName());
@@ -56,6 +58,11 @@
   private static final String URI_HIVE_PREFIX = "hive2:";
 
+  // This value is set to true by the ServiceUnavailableRetryStrategy when the server returns a 401
+  static final String HIVE_SERVER2_RETRY_KEY = "hive.server2.retryserver";
+  static final String HIVE_SERVER2_RETRY_TRUE = "true";
+  static final String HIVE_SERVER2_RETRY_FALSE = "false";
+
   public static class JdbcConnectionParams {
     // Note on client side parameter naming convention:
     // Prefer using a shorter camelCase param name instead of using the same name as the
@@ -98,6 +105,11 @@
     // Default namespace value on ZooKeeper.
     // This value is used if the param "zooKeeperNamespace" is not specified in the JDBC Uri.
     static final String ZOOKEEPER_DEFAULT_NAMESPACE = "hiveserver2";
+    static final String COOKIE_AUTH = "cookieAuth";
+    static final String COOKIE_AUTH_FALSE = "false";
+    static final String COOKIE_NAME = "cookieName";
+    // The default value of the cookie name when CookieAuth=true
+    static final String DEFAULT_COOKIE_NAMES_HS2 = "hive.server2.auth";
 
     // Non-configurable params:
     // Currently supports JKS keystore format
@@ -560,4 +572,28 @@ static int getVersionPart(String fullVersion, int position) {
     }
     return version;
   }
+
+  /**
+   * The function iterates through the list of cookies in the cookiestore and tries to
+   * match them with the cookieName. If there is a match, the cookieStore already
+   * has a valid cookie and the client need not send credentials for validation purposes.
+   * @param cookieStore The cookie Store
+   * @param cookieName Name of the cookie which needs to be validated
+   * @return true or false based on whether the client needs to send the credentials or
+   * not to the server.
+   */
+  static boolean needToSendCredentials(CookieStore cookieStore, String cookieName) {
+    if (cookieName == null || cookieStore == null) {
+      return true;
+    }
+
+    List<Cookie> cookies = cookieStore.getCookies();
+
+    for (Cookie c : cookies) {
+      if (c.getName().equals(cookieName)) {
+        return false;
+      }
+    }
+    return true;
+  }
 }
diff --git a/pom.xml b/pom.xml
index a9a901e..64d1d37 100644
--- a/pom.xml
+++ b/pom.xml
@@ -124,8 +124,8 @@
    <hbase.hadoop1.version>0.98.9-hadoop1</hbase.hadoop1.version>
    <hbase.hadoop2.version>0.98.9-hadoop2</hbase.hadoop2.version>
-    <httpcomponents.client.version>4.2.5</httpcomponents.client.version>
-    <httpcomponents.core.version>4.2.5</httpcomponents.core.version>
+    <httpcomponents.client.version>4.4</httpcomponents.client.version>
+    <httpcomponents.core.version>4.4</httpcomponents.core.version>
    <ivy.version>2.4.0</ivy.version>
    <jackson.version>1.9.2</jackson.version>
    <javaewah.version>0.3.2</javaewah.version>
@@ -1083,6 +1083,16 @@
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop-23.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+          </exclusion>
+        </exclusions>
      </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
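
Note: on the client side the new behaviour is driven entirely by the JDBC URL. Per the JdbcConnectionParams additions above, cookie replay is on by default, cookieAuth=false turns it off, and cookieName overrides the cookie the driver watches for (default hive.server2.auth). The usage sketch below assumes a placeholder host, port and httpPath; transportMode and httpPath are the pre-existing HTTP-mode URL parameters, and the credentials are dummies.

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class CookieAuthUsageSketch {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Placeholder endpoint; adjust host/port/httpPath for a real HiveServer2 in HTTP mode.
        // Cookie replay is enabled by default with cookie name hive.server2.auth; append
        //   ;cookieName=<name>   to track a differently named server-side cookie, or
        //   ;cookieAuth=false    to send credentials on every request again.
        String url = "jdbc:hive2://localhost:10001/default;transportMode=http;httpPath=cliservice"
            + ";cookieAuth=false";

        try (Connection conn = DriverManager.getConnection(url, "user", "password")) {
          System.out.println("connected, cookie replay disabled: " + !conn.isClosed());
        }
      }
    }
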