diff --git a/bin/beeline b/bin/beeline index d247c39..bceb7b9 100644 --- a/bin/beeline +++ b/bin/beeline @@ -18,4 +18,8 @@ bin=`dirname "$0"` bin=`cd "$bin"; pwd` +# Set Hadoop User classpath to true so that httpclient jars are taken from +# hive lib instead of hadoop lib. +export HADOOP_USER_CLASSPATH_FIRST=true + . "$bin"/hive --service beeline "$@" diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index cf82e8b..589ff6f 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1598,6 +1598,7 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { "Top level directory where operation logs are stored if logging functionality is enabled"), HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false, "When true, HS2 operation logs available for clients will be verbose"), + // logging configuration HIVE_LOG4J_FILE("hive.log4j.file", "", "Hive log4j configuration file.\n" + @@ -1700,6 +1701,17 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { "Keepalive time for an idle http worker thread. 
When the number of workers exceeds min workers, " + "excessive threads are killed after this time interval."), + // Cookie based authentication + HIVE_SERVER2_THRIFT_HTTP_COOKIE_AUTH_ENABLED("hive.server2.cookie.auth.enabled", true, + "When true, HiveServer2 in HTTP transport mode, will use cookie based authentication mechanism."), + HIVE_SERVER2_THRIFT_HTTP_COOKIE_MAX_AGE("hive.server2.cookie.max.age", "86400", + new TimeValidator(TimeUnit.SECONDS), + "Maximum age in seconds for server side cookie used by HS2 in HTTP mode."), + HIVE_SERVER2_THRIFT_HTTP_COOKIE_DOMAIN("hive.server2.cookie.domain", null, + "Domain for the HS2 generated cookies"), + HIVE_SERVER2_THRIFT_HTTP_COOKIE_PATH("hive.server2.cookie.path", null, + "Path for the HS2 generated cookies"), + // binary transport settings HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000, "Port number of HiveServer2 Thrift interface when hive.server2.transport.mode is 'binary'."), diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java index 3e913da..970e904 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java @@ -160,7 +160,7 @@ private static TTransport getHttpTransport() throws Exception { String httpUrl = transportMode + "://" + host + ":" + port + "/" + thriftHttpPath + "/"; httpClient.addRequestInterceptor( - new HttpBasicAuthInterceptor(USERNAME, PASSWORD)); + new HttpBasicAuthInterceptor(USERNAME, PASSWORD, null, null)); return new THttpClient(httpUrl, httpClient); } diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index 764a3f1..d9d06d3 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ 
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -50,10 +50,11 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.protocol.HttpContext; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.KerberosSaslHelper; @@ -73,9 +74,13 @@ import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp; import org.apache.hive.service.cli.thrift.TSessionHandle; import org.apache.http.HttpRequestInterceptor; +import org.apache.http.HttpResponse; +import org.apache.http.client.CookieStore; +import org.apache.http.client.ServiceUnavailableRetryStrategy; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.ssl.SSLSocketFactory; -import org.apache.http.impl.client.DefaultHttpClient; +import org.apache.http.impl.client.BasicCookieStore; +import org.apache.http.impl.client.HttpClients; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.transport.THttpClient; @@ -235,7 +240,7 @@ private String getServerHttpUrl(boolean useSsl) { } private TTransport createHttpTransport() throws SQLException, TTransportException { - DefaultHttpClient httpClient; + CloseableHttpClient httpClient; boolean useSsl = isSslConnection(); // Create an http client from the configs httpClient = getHttpClient(useSsl); @@ -259,8 +264,16 @@ private TTransport createHttpTransport() throws SQLException, TTransportExceptio return transport; } - private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException { - DefaultHttpClient httpClient = new DefaultHttpClient(); + private CloseableHttpClient getHttpClient(Boolean useSsl) throws 
SQLException { + CloseableHttpClient httpClient = null; + boolean isCookieEnabled = sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH) == null || + (!JdbcConnectionParams.COOKIE_AUTH_FALSE.equals( + sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH).toLowerCase())); + String cookieNamesStr = sessConfMap.get(JdbcConnectionParams.COOKIE_NAMES) == null ? + JdbcConnectionParams.COOKIE_NAMES_HS2 : sessConfMap.get(JdbcConnectionParams.COOKIE_NAMES); + String cookieNames[] = cookieNamesStr.split(","); + CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null; + // Request interceptor for any request pre-processing logic HttpRequestInterceptor requestInterceptor; // If Kerberos @@ -270,18 +283,18 @@ private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException { * It does the kerberos authentication and get the final service ticket, * for sending to the server before every request. * In https mode, the entire information is encrypted - * TODO: Optimize this with a mix of kerberos + using cookie. */ requestInterceptor = new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), - host, getServerHttpUrl(useSsl), assumeSubject); + host, getServerHttpUrl(useSsl), assumeSubject, cookieStore, cookieNames); } else { /** * Add an interceptor to pass username/password in the header. * In https mode, the entire information is encrypted */ - requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword()); + requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword(), + cookieStore, cookieNames); } // Configure httpClient for SSL if (useSsl) { @@ -324,8 +337,27 @@ private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException { throw new SQLException(msg, " 08S01", e); } } - httpClient.addRequestInterceptor(requestInterceptor); - return httpClient; + // Create a http client with a retry mechanism when the server returns a status code of 401. 
+ httpClient = HttpClients.custom().setServiceUnavailableRetryStrategy(new ServiceUnavailableRetryStrategy() { + @Override + public boolean retryRequest( + final HttpResponse response, final int executionCount, final HttpContext context) { + int statusCode = response.getStatusLine().getStatusCode(); + boolean ret = statusCode == 401 && executionCount < 1; + // Set the context attribute to true which will be interpreted by the request interceptor + if (ret) { + context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_TRUE); + } + return ret; + } + + @Override + public long getRetryInterval() { + // Immediate retry + return 0; + } + }).addInterceptorFirst(requestInterceptor).build(); + return httpClient; } /** diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java index dd4f62a..86dbecd 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java @@ -25,6 +25,8 @@ import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CookieStore; +import org.apache.http.client.protocol.ClientContext; import org.apache.http.impl.auth.AuthSchemeBase; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.protocol.HttpContext; @@ -37,20 +39,41 @@ public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { UsernamePasswordCredentials credentials; AuthSchemeBase authScheme; + CookieStore cookieStore; + boolean isCookieEnabled; + String cookieNames[]; - public HttpBasicAuthInterceptor(String username, String password) { + public HttpBasicAuthInterceptor(String username, String password, CookieStore cookieStore, + String cn[]) { if(username != null){ credentials = new UsernamePasswordCredentials(username, password); } authScheme = new BasicScheme(); + 
this.cookieStore = cookieStore; + isCookieEnabled = (cookieStore != null); + cookieNames = cn; } @Override public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException { - Header basicAuthHeader = authScheme.authenticate( - credentials, httpRequest, httpContext); - httpRequest.addHeader(basicAuthHeader); + if (isCookieEnabled) { + httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore); + } + // Add the authentication details under the following scenarios: + // 1. The first time when the request is sent OR + // 2. The server returns a 401, which sometimes means the cookie has expired + if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null && + (cookieStore == null || (cookieStore != null && + Utils.needToSendCredentials(cookieStore, cookieNames)))) || + (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null && + httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY). + equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) { + Header basicAuthHeader = authScheme.authenticate(credentials, httpRequest, httpContext); + httpRequest.addHeader(basicAuthHeader); + } + if (isCookieEnabled) { + httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE); + } } - } diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java index 2d21547..d3fc59b 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java @@ -25,6 +25,8 @@ import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; +import org.apache.http.client.CookieStore; +import org.apache.http.client.protocol.ClientContext; import org.apache.http.protocol.HttpContext; /** @@ -40,31 +42,58 @@ String host; String serverHttpUrl; boolean assumeSubject; + 
CookieStore cookieStore; + boolean isCookieEnabled; + String cookieNames[]; // A fair reentrant lock private static ReentrantLock kerberosLock = new ReentrantLock(true); public HttpKerberosRequestInterceptor(String principal, String host, - String serverHttpUrl, boolean assumeSubject) { + String serverHttpUrl, boolean assumeSubject, CookieStore cs, String cn[]) { this.principal = principal; this.host = host; this.serverHttpUrl = serverHttpUrl; this.assumeSubject = assumeSubject; + this.cookieStore = cs; + isCookieEnabled = (cs != null); + cookieNames = cn; } @Override public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException { String kerberosAuthHeader; + try { // Generate the service ticket for sending to the server. // Locking ensures the tokens are unique in case of concurrent requests kerberosLock.lock(); - kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket( - principal, host, serverHttpUrl, assumeSubject); - // Set the session key token (Base64 encoded) in the headers - httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " + - HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader); + // If cookie based authentication is allowed, generate ticket only when necessary. + // The necessary condition is either when there are no server side cookies in the + // cookiestore which can be sent back or when the server returns a 401 error code + // indicating that the previous cookie has expired. + if (isCookieEnabled) { + httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore); + } + // Generate the kerberos ticket under the following scenarios: + // 1. The first time when the request is sent OR + // 2. 
The server returns a 401, which sometimes means the cookie has expired + if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null && + (cookieStore == null || (cookieStore != null && + Utils.needToSendCredentials(cookieStore, cookieNames)))) || + (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null && + httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY). + equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) { + kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket( + principal, host, serverHttpUrl, assumeSubject); + // Set the session key token (Base64 encoded) in the headers + httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " + + HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader); + } + if (isCookieEnabled) { + httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE); + } } catch (Exception e) { throw new HttpException(e.getMessage(), e); } diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java index a27a532..c372062 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -34,6 +34,8 @@ import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.cli.thrift.TStatus; import org.apache.hive.service.cli.thrift.TStatusCode; +import org.apache.http.client.CookieStore; +import org.apache.http.cookie.Cookie; public class Utils { public static final Log LOG = LogFactory.getLog(Utils.class.getName()); @@ -56,6 +58,11 @@ private static final String URI_HIVE_PREFIX = "hive2:"; + // This value is set to true by the setServiceUnavailableRetryStrategy() when the server returns 401 + static final String HIVE_SERVER2_RETRY_KEY = "hive.server2.retryserver"; + static final String HIVE_SERVER2_RETRY_TRUE = "true"; + static final String HIVE_SERVER2_RETRY_FALSE = "false"; + public static class JdbcConnectionParams { // Note on client side parameter naming convention: // 
Prefer using a shorter camelCase param name instead of using the same name as the @@ -98,6 +105,10 @@ // Default namespace value on ZooKeeper. // This value is used if the param "zooKeeperNamespace" is not specified in the JDBC Uri. static final String ZOOKEEPER_DEFAULT_NAMESPACE = "hiveserver2"; + static final String COOKIE_AUTH = "cookieAuth"; + static final String COOKIE_AUTH_FALSE = "false"; + static final String COOKIE_NAMES = "cookieNames"; + static final String COOKIE_NAMES_HS2 = "hive.server2.auth"; // Non-configurable params: // Currently supports JKS keystore format @@ -560,4 +571,30 @@ static int getVersionPart(String fullVersion, int position) { } return version; } + + /** + * The function iterates through the list of cookies in the cookiestore and tries to + * match them with the cookieNames input. If there is a match, the cookieStore already + * has a valid cookie and the client need not send Credentials for validation purpose. + * @param cookieStore The cookie Store + * @param cookieNames Names of the cookies which needs to be validated + * @return true or false based on whether the client needs to send the credentials or + * not to the server. + */ + static boolean needToSendCredentials(CookieStore cookieStore, String cookieNames[]) { + if (cookieNames == null || cookieStore == null) { + return true; + } + + List<Cookie> cookies = cookieStore.getCookies(); + + for (Cookie c : cookies) { + for (String cn : cookieNames) { + if (c.getName().equals(cn)) { + return false; + } + } + } + return true; + } } diff --git a/pom.xml b/pom.xml index dee7464..103f291 100644 --- a/pom.xml +++ b/pom.xml @@ -124,8 +124,8 @@ 0.98.9-hadoop1 0.98.9-hadoop2 - 4.2.5 - 4.2.5 + 4.4 + 4.4 1.9.2 0.3.2 5.5.1 @@ -1082,6 +1082,16 @@ org.apache.hadoop hadoop-common ${hadoop-23.version} + + + org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpclient + + org.apache.hadoop