diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index a809f17..6dfd650 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1585,6 +1585,20 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
"Top level directory where operation logs are stored if logging functionality is enabled"),
HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false,
"When true, HS2 operation logs available for clients will be verbose"),
+
+ // Cookie based authentication
+ HIVE_SERVER2_COOKIE_AUTH_ENABLED("hive.server2.cookie.auth.enabled", true,
+ "When true, HS2 will use cookie based authentication mechanism"),
+ HIVE_SERVER2_COOKIE_MAX_AGE("hive.server2.cookie.max.age", "8",
+ new TimeValidator(TimeUnit.SECONDS),
+ "Maximum age in seconds for server side cookie used by HS2 in HTTP mode."),
+ HIVE_SERVER2_COOKIE_DOMAIN("hive.server2.cookie.domain", null,
+ "Domain for the HS2 generated cookies"),
+ HIVE_SERVER2_COOKIE_PATH("hive.server2.cookie.path", null,
+ "Path for the HS2 generated cookies"),
+ HIVE_SERVER2_CLIENT_MAX_RETRIES("hive.server2.client.maxretries", 5,
+ "Maximum number of retries made by the client to the server when it receives a 401 error code"),
+
// logging configuration
HIVE_LOG4J_FILE("hive.log4j.file", "",
"Hive log4j configuration file.\n" +
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index 0e91a50..a76ee29 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -125,6 +125,16 @@
hadoop-common
${hadoop-23.version}
true
+
+
+ org.apache.httpcomponents
+ httpcore
+
+
+ org.apache.httpcomponents
+ httpclient
+
+
@@ -208,6 +218,7 @@
org.antlr:*
org.slf4j:slf4j-log4j12
log4j:*
+ org.apache.httpcomponents:*
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 764a3f1..061dfae 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -50,10 +50,12 @@
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.protocol.HttpContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.auth.KerberosSaslHelper;
@@ -73,9 +75,13 @@
import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp;
import org.apache.hive.service.cli.thrift.TSessionHandle;
import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.ServiceUnavailableRetryStrategy;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.HttpClients;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.THttpClient;
@@ -109,6 +115,7 @@
private final List supportedProtocols = new LinkedList();
private int loginTimeout = 0;
private TProtocolVersion protocol;
+ HiveConf hiveConf = new HiveConf();
public HiveConnection(String uri, Properties info) throws SQLException {
setupLoginTimeout();
@@ -235,7 +242,7 @@ private String getServerHttpUrl(boolean useSsl) {
}
private TTransport createHttpTransport() throws SQLException, TTransportException {
- DefaultHttpClient httpClient;
+ CloseableHttpClient httpClient;
boolean useSsl = isSslConnection();
// Create an http client from the configs
httpClient = getHttpClient(useSsl);
@@ -259,10 +266,13 @@ private TTransport createHttpTransport() throws SQLException, TTransportExceptio
return transport;
}
- private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException {
- DefaultHttpClient httpClient = new DefaultHttpClient();
+ private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
+ CloseableHttpClient httpClient = null;
+ boolean isCookieEnabled = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_COOKIE_AUTH_ENABLED);
+ CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
+
// Request interceptor for any request pre-processing logic
- HttpRequestInterceptor requestInterceptor;
+ HttpRequestInterceptor requestInterceptor;
// If Kerberos
if (isKerberosAuthMode()) {
/**
@@ -271,17 +281,17 @@ private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException {
* for sending to the server before every request.
* In https mode, the entire information is encrypted
* TODO: Optimize this with a mix of kerberos + using cookie.
- */
+ */
requestInterceptor =
new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
- host, getServerHttpUrl(useSsl), assumeSubject);
+ host, getServerHttpUrl(useSsl), assumeSubject, cookieStore);
}
else {
/**
* Add an interceptor to pass username/password in the header.
* In https mode, the entire information is encrypted
- */
- requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword());
+ */
+ requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword(), cookieStore);
}
// Configure httpClient for SSL
if (useSsl) {
@@ -324,8 +334,28 @@ private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException {
throw new SQLException(msg, " 08S01", e);
}
}
- httpClient.addRequestInterceptor(requestInterceptor);
- return httpClient;
+ // Create a http client with a retry mechanism when the server returns a status code of 401.
+ httpClient = HttpClients.custom().setServiceUnavailableRetryStrategy(new ServiceUnavailableRetryStrategy() {
+ @Override
+ public boolean retryRequest(
+ final HttpResponse response, final int executionCount, final HttpContext context) {
+ int statusCode = response.getStatusLine().getStatusCode();
+ boolean ret = statusCode == 401 && executionCount < (new HiveConf()).getIntVar(ConfVars.HIVE_SERVER2_CLIENT_MAX_RETRIES);
+
+ // Set the context attribute to true which will be interpreted by the request interceptor
+ if (ret) {
+ context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_TRUE);
+ }
+ return ret;
+ }
+
+ @Override
+ public long getRetryInterval() {
+ // Immediate retry
+ return 0;
+ }
+ }).addInterceptorFirst(requestInterceptor).build();
+ return httpClient;
}
/**
@@ -424,7 +454,6 @@ private String getClientDelegationToken(Map jdbcConnConf)
private void openSession() throws SQLException {
TOpenSessionReq openReq = new TOpenSessionReq();
-
Map openConf = new HashMap();
// for remote JDBC client, try to set the conf var using 'set foo=bar'
for (Entry hiveConf : connParams.getHiveConfs().entrySet()) {
@@ -453,6 +482,7 @@ private void openSession() throws SQLException {
try {
TOpenSessionResp openResp = client.OpenSession(openReq);
+
// validate connection
Utils.verifySuccess(openResp.getStatus());
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
index dd4f62a..b27d89e 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
@@ -20,15 +20,22 @@
import java.io.IOException;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.http.Header;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.protocol.ClientContext;
+import org.apache.http.cookie.*;
import org.apache.http.impl.auth.AuthSchemeBase;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.protocol.HttpContext;
+import java.util.List;
+
/**
* The class is instantiated with the username and password, it is then
* used to add header with these credentials to HTTP requests
@@ -37,20 +44,52 @@
public class HttpBasicAuthInterceptor implements HttpRequestInterceptor {
UsernamePasswordCredentials credentials;
AuthSchemeBase authScheme;
-
+ CookieStore cookieStore;
+ HiveConf conf = new HiveConf();
+
public HttpBasicAuthInterceptor(String username, String password) {
if(username != null){
credentials = new UsernamePasswordCredentials(username, password);
}
authScheme = new BasicScheme();
+ cookieStore = null;
+ }
+
+
+ public HttpBasicAuthInterceptor(String username, String password,
+ CookieStore cookieStore) {
+ if(username != null){
+ credentials = new UsernamePasswordCredentials(username, password);
+ }
+ authScheme = new BasicScheme();
+ this.cookieStore = cookieStore;
}
@Override
public void process(HttpRequest httpRequest, HttpContext httpContext)
throws HttpException, IOException {
- Header basicAuthHeader = authScheme.authenticate(
- credentials, httpRequest, httpContext);
- httpRequest.addHeader(basicAuthHeader);
+
+ if (conf.getBoolVar(ConfVars.HIVE_SERVER2_COOKIE_AUTH_ENABLED)) {
+ httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+
+ // Add the authentication details under the following scenarios:
+ // 1. The first time when the request is sent OR
+ // 2. The server returns a 401, which sometimes means the cookie has expired
+ if ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
+ (cookieStore == null ||
+ (cookieStore != null && cookieStore.getCookies().isEmpty()))) ||
+ (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
+ httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).equals(Utils.HIVE_SERVER2_RETRY_TRUE))) {
+ Header basicAuthHeader = authScheme.authenticate(
+ credentials, httpRequest, httpContext);
+ httpRequest.addHeader(basicAuthHeader);
+ }
+ httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE);
+ } else {
+ Header basicAuthHeader = authScheme.authenticate(
+ credentials, httpRequest, httpContext);
+ httpRequest.addHeader(basicAuthHeader);
+ }
}
}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
index 2d21547..b961ab1 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
@@ -19,12 +19,19 @@
package org.apache.hive.jdbc;
import java.io.IOException;
+import java.util.List;
import java.util.concurrent.locks.ReentrantLock;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.service.auth.HttpAuthUtils;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
+import org.apache.http.Header;
import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.protocol.ClientContext;
+import org.apache.http.cookie.Cookie;
import org.apache.http.protocol.HttpContext;
/**
@@ -40,36 +47,72 @@
String host;
String serverHttpUrl;
boolean assumeSubject;
-
+ CookieStore cookieStore;
+ HiveConf conf = new HiveConf();
+
// A fair reentrant lock
private static ReentrantLock kerberosLock = new ReentrantLock(true);
-
+
public HttpKerberosRequestInterceptor(String principal, String host,
String serverHttpUrl, boolean assumeSubject) {
+ this.principal = principal;
+ this.host = host;
+ this.serverHttpUrl = serverHttpUrl;
+ this.assumeSubject = assumeSubject;
+ cookieStore = null;
+ }
+
+ public HttpKerberosRequestInterceptor(String principal, String host,
+ String serverHttpUrl, boolean assumeSubject, CookieStore cs) {
this.principal = principal;
this.host = host;
this.serverHttpUrl = serverHttpUrl;
this.assumeSubject = assumeSubject;
+ this.cookieStore = cs;
}
@Override
public void process(HttpRequest httpRequest, HttpContext httpContext)
throws HttpException, IOException {
String kerberosAuthHeader;
+
try {
- // Generate the service ticket for sending to the server.
- // Locking ensures the tokens are unique in case of concurrent requests
- kerberosLock.lock();
- kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
- principal, host, serverHttpUrl, assumeSubject);
- // Set the session key token (Base64 encoded) in the headers
- httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
- HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+ // Generate the service ticket for sending to the server.
+ // Locking ensures the tokens are unique in case of concurrent requests
+ kerberosLock.lock();
+ // If cookie based authentication is allowed, generate ticket only when necessary.
+ // The necessary condition is either when there are no server side cookies in the
+ // cookiestore which can be sent back or when the server returns a 401 error code
+ // indicating that the previous cookie has expired.
+ if (conf.getBoolVar(ConfVars.HIVE_SERVER2_COOKIE_AUTH_ENABLED)) {
+ httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+ // Generate the kerberos ticket under the following scenarios:
+ // 1. The first time when the request is sent OR
+ // 2. The server returns a 401, which sometimes means the cookie has expired
+ if ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
+ (cookieStore == null ||
+ (cookieStore != null && cookieStore.getCookies().isEmpty()))) ||
+ (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
+ httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).equals(Utils.HIVE_SERVER2_RETRY_TRUE))) {
+ kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
+ principal, host, serverHttpUrl, assumeSubject);
+ // Set the session key token (Base64 encoded) in the headers
+ httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
+ HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+ }
+ httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE);
+ } else {
+ kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
+ principal, host, serverHttpUrl, assumeSubject);
+ // Set the session key token (Base64 encoded) in the headers
+ httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
+ HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+ }
} catch (Exception e) {
throw new HttpException(e.getMessage(), e);
}
finally {
kerberosLock.unlock();
- }
+ }
}
}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
index a27a532..5493462 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -56,6 +56,11 @@
private static final String URI_HIVE_PREFIX = "hive2:";
+ // This attribute is set to "true" by the retry strategy installed via setServiceUnavailableRetryStrategy() when the server returns 401
+ static final String HIVE_SERVER2_RETRY_KEY = "hive.server2.retryserver";
+ static final String HIVE_SERVER2_RETRY_TRUE = "true";
+ static final String HIVE_SERVER2_RETRY_FALSE = "false";
+
public static class JdbcConnectionParams {
// Note on client side parameter naming convention:
// Prefer using a shorter camelCase param name instead of using the same name as the
diff --git a/pom.xml b/pom.xml
index f302524..cd8bc23 100644
--- a/pom.xml
+++ b/pom.xml
@@ -124,8 +124,8 @@
0.98.9-hadoop1
0.98.9-hadoop2
- 4.2.5
- 4.2.5
+ 4.4
+ 4.4
1.9.2
0.3.2
5.5.1
@@ -444,7 +444,7 @@
httpclient
${httpcomponents.client.version}
-
+
org.apache.httpcomponents
httpcore
${httpcomponents.core.version}
@@ -1082,6 +1082,16 @@
org.apache.hadoop
hadoop-common
${hadoop-23.version}
+
+
+ org.apache.httpcomponents
+ httpcore
+
+
+ org.apache.httpcomponents
+ httpclient
+
+
org.apache.hadoop
diff --git a/service/src/java/org/apache/hive/service/CookieSigner.java b/service/src/java/org/apache/hive/service/CookieSigner.java
new file mode 100644
index 0000000..ac2d7b2
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/CookieSigner.java
@@ -0,0 +1,93 @@
+package org.apache.hive.service;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.logging.Log;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+/**
+ * The cookie signer generates a signature based on SHA digest
+ * and appends it to the cookie value generated at the
+ * server side. It uses SHA digest algorithm to sign and verify signatures.
+ */
+public class CookieSigner {
+ private static final String SIGNATURE = "&s=";
+ private static final String SHA_STRING = "SHA";
+ private byte[] secretBytes;
+ private static final Log LOG = LogFactory.getLog(CookieSigner.class);
+
+ /**
+ * Constructor
+ * @param secret Secret Bytes
+ */
+ public CookieSigner(byte[] secret) {
+ if (secret == null) {
+ throw new IllegalArgumentException("NULL Secret Bytes");
+ }
+ this.secretBytes = secret.clone();
+ }
+
+
+ /**
+ * Sign the cookie given the string token as input.
+ * @param str Input token
+ * @return Signed token that can be used to create a cookie
+ */
+ public String signCookie(String str) {
+ if (str == null || str.isEmpty()) {
+ throw new IllegalArgumentException("NULL or empty string to sign");
+ }
+ String signature = getSignature(str);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Signature generated for " + str + " is " + signature);
+ }
+ return str + SIGNATURE + signature;
+ }
+
+
+ /**
+ * Verify a signed string and extracts the original string.
+ * @param signedStr The already signed string
+ * @return Raw Value of the string without the signature
+ */
+ public String verifyAndExtract(String signedStr) {
+ int index = signedStr.lastIndexOf(SIGNATURE);
+ if (index == -1) {
+ throw new IllegalArgumentException("Invalid input sign: " + signedStr);
+ }
+ String originalSignature = signedStr.substring(index + SIGNATURE.length());
+ String rawValue = signedStr.substring(0, index);
+ String currentSignature = getSignature(rawValue);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Signature generated for " + rawValue + " inside verify is " + currentSignature);
+ }
+ if (!originalSignature.equals(currentSignature)) {
+ throw new IllegalArgumentException("Invalid sign, original = " + originalSignature +
+ " current = " + currentSignature);
+ }
+ return rawValue;
+ }
+
+
+ /**
+ * Get the signature of the input string based on SHA digest algorithm.
+ * @param str Input token
+ * @return Signed String
+ */
+ private String getSignature(String str) {
+ try {
+ MessageDigest md = MessageDigest.getInstance(SHA_STRING);
+ md.update(str.getBytes());
+ md.update(secretBytes);
+ byte[] digest = md.digest();
+ return new Base64(0).encodeToString(digest);
+ } catch (NoSuchAlgorithmException ex) {
+ throw new RuntimeException("Invalid SHA digest String: " + SHA_STRING +
+ " " + ex.getMessage(), ex);
+ }
+ }
+}
diff --git a/service/src/java/org/apache/hive/service/ServiceUtils.java b/service/src/java/org/apache/hive/service/ServiceUtils.java
index e712aaf..8c68801 100644
--- a/service/src/java/org/apache/hive/service/ServiceUtils.java
+++ b/service/src/java/org/apache/hive/service/ServiceUtils.java
@@ -17,8 +17,30 @@
*/
package org.apache.hive.service;
-public class ServiceUtils {
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class ServiceUtils {
+ private static final Log LOG = LogFactory.getLog(ServiceUtils.class);
+ private static final String COOKIE_ATTR_SEPARATOR = "&";
+ private static final String COOKIE_CLIENT_USER_NAME = "cu";
+ private static final String COOKIE_CLIENT_IP_ADDRESS = "ip";
+ private final static Set COOKIE_ATTRIBUTES =
+ new HashSet(Arrays.asList(COOKIE_CLIENT_USER_NAME, COOKIE_CLIENT_IP_ADDRESS));
+
+ public class CookieData {
+ public String userName;
+ public String ipAddress;
+ }
+
/*
* Get the index separating the user name from domain name (the user's name up
* to the first '/' or '@').
@@ -41,4 +63,63 @@ public static int indexOfDomainMatch(String userName) {
}
return endIdx;
}
-}
\ No newline at end of file
+
+
+ /**
+ * Creates and returns a HS2 cookie token.
+ * @param clientUserName Client User name.
+ * @param clientIpAddress Client IP Address.
+ * @return An unsigned cookie token generated from input parameters.
+ */
+ public static String createCookieToken(String clientUserName, String clientIpAddress) {
+ StringBuffer sb = new StringBuffer();
+ sb.append(COOKIE_CLIENT_USER_NAME).append("=").append(clientUserName).
+ append(COOKIE_ATTR_SEPARATOR);
+ sb.append(COOKIE_CLIENT_IP_ADDRESS).append("=").append(clientIpAddress);
+ return sb.toString();
+ }
+
+
+ /**
+ * Parses a cookie token to client user name and client IP Address.
+ * @param tokenStr Token String.
+ * @return A valid CookieData object if input is of valid format, else returns null.
+ */
+ public CookieData parseCookieToken(String tokenStr) {
+ Map map = splitCookieToken(tokenStr);
+
+ if (!map.keySet().equals(COOKIE_ATTRIBUTES)) {
+ LOG.error("Invalid token with missing attributes " + tokenStr);
+ return null;
+ }
+ CookieData c = new CookieData();
+ c.userName = map.get(COOKIE_CLIENT_USER_NAME);
+ c.ipAddress = map.get(COOKIE_CLIENT_IP_ADDRESS);
+ return c;
+ }
+
+
+ /**
+ * Splits the cookie token into attributes pairs.
+ * @param tokenStr input token.
+ * @return a map with the attribute pairs of the token if the input is valid.
+ * Else, returns null.
+ */
+ private static Map splitCookieToken(String tokenStr) {
+ Map map = new HashMap();
+ StringTokenizer st = new StringTokenizer(tokenStr, COOKIE_ATTR_SEPARATOR);
+
+ while (st.hasMoreTokens()) {
+ String part = st.nextToken();
+ int separator = part.indexOf('=');
+ if (separator == -1) {
+ LOG.error("Invalid token string " + tokenStr);
+ return null;
+ }
+ String key = part.substring(0, separator);
+ String value = part.substring(separator + 1);
+ map.put(key, value);
+ }
+ return map;
+ }
+}
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index fde39d2..fbce905 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -19,11 +19,16 @@
package org.apache.hive.service.cli.thrift;
import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
import java.security.PrivilegedExceptionAction;
import java.util.Map;
+import java.util.Random;
import java.util.Set;
+import java.util.StringTokenizer;
import javax.servlet.ServletException;
+import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -31,6 +36,8 @@
import org.apache.commons.codec.binary.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
@@ -41,6 +48,9 @@
import org.apache.hive.service.auth.HttpAuthenticationException;
import org.apache.hive.service.auth.PasswdAuthenticationProvider;
import org.apache.hive.service.cli.session.SessionManager;
+import org.apache.hive.service.CookieSigner;
+import org.apache.hive.service.ServiceUtils;
+import org.apache.hive.service.ServiceUtils.CookieData;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.server.TServlet;
@@ -63,41 +73,81 @@
private final String authType;
private final UserGroupInformation serviceUGI;
private final UserGroupInformation httpUGI;
-
+ private HiveConf hiveConf = new HiveConf();
+
+ // Class members for cookie based authentication.
+ private CookieSigner signer;
+ public static final String AUTH_COOKIE = "hive.server2.auth";
+ private static final Random RAN = new Random();
+ private String cookieDomain;
+ private String cookiePath;
+ private int cookieMaxAge;
+
+
public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory,
String authType, UserGroupInformation serviceUGI, UserGroupInformation httpUGI) {
super(processor, protocolFactory);
this.authType = authType;
this.serviceUGI = serviceUGI;
this.httpUGI = httpUGI;
+ // Initialize the cookie based authentication related variables.
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_COOKIE_AUTH_ENABLED)) {
+ // Generate the signer with secret.
+ String secret = Long.toString(RAN.nextLong());
+ LOG.debug("Using the random number as the secret for cookie generation " + secret);
+ this.signer = new CookieSigner(secret.getBytes());
+ this.cookieMaxAge = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_COOKIE_MAX_AGE);
+ this.cookieDomain = hiveConf.getVar(ConfVars.HIVE_SERVER2_COOKIE_DOMAIN);
+ this.cookiePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_COOKIE_PATH);
+ }
}
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
- String clientUserName;
+ String clientUserName = null;
String clientIpAddress;
+ boolean requireNewCookie = false;
+
try {
- // For a kerberos setup
- if(isKerberosAuthMode(authType)) {
- clientUserName = doKerberosAuth(request);
- String doAsQueryParam = getDoAsQueryParam(request.getQueryString());
- if (doAsQueryParam != null) {
- SessionManager.setProxyUserName(doAsQueryParam);
+ // If the cookie based authentication is already enabled, parse the
+ // request and validate the request cookies.
+ if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_COOKIE_AUTH_ENABLED)) {
+ clientUserName = validateCookie(request);
+ requireNewCookie = clientUserName == null;
+ if (requireNewCookie) {
+ LOG.info("Could not validate cookie sent, will try to generate a new cookie");
}
}
- else {
- clientUserName = doPasswdAuth(request, authType);
+ // If the cookie based authentication is not enabled or the request does
+ // not have a valid cookie, use the kerberos or password based authentication
+ // depending on the server setup.
+ if (clientUserName == null) {
+ // For a kerberos setup
+ if (isKerberosAuthMode(authType)) {
+ clientUserName = doKerberosAuth(request);
+ String doAsQueryParam = getDoAsQueryParam(request.getQueryString());
+ if (doAsQueryParam != null) {
+ SessionManager.setProxyUserName(doAsQueryParam);
+ }
+ }
+ // For password based authentication
+ else {
+ clientUserName = doPasswdAuth(request, authType);
+ }
}
- LOG.debug("Client username: " + clientUserName);
// Set the thread local username to be used for doAs if true
SessionManager.setUserName(clientUserName);
-
clientIpAddress = request.getRemoteAddr();
- LOG.debug("Client IP Address: " + clientIpAddress);
// Set the thread local ip address
- SessionManager.setIpAddress(clientIpAddress);
-
+ SessionManager.setIpAddress(clientIpAddress);
+ // Generate new cookie and add it to the response
+ if (requireNewCookie &&
+ !authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.NOSASL.toString())) {
+ String cookieToken = ServiceUtils.createCookieToken(clientUserName, clientIpAddress);
+ LOG.info("Cookie added for clientUserName " + clientUserName);
+ response.addCookie(createCookie(signer.signCookie(cookieToken)));
+ }
super.doPost(request, response);
}
catch (HttpAuthenticationException e) {
@@ -107,7 +157,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
if(isKerberosAuthMode(authType)) {
response.addHeader(HttpAuthUtils.WWW_AUTHENTICATE, HttpAuthUtils.NEGOTIATE);
}
- response.getWriter().println("Authentication Error: " + e.getMessage());
+ response.getWriter().println("Authentication Error: " + e.getMessage());
}
finally {
// Clear the thread locals
@@ -116,7 +166,125 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
SessionManager.clearProxyUserName();
}
}
+
+
+ /**
+ * Retrieves the client name from cookieString. If the cookie does not
+ * correspond to a valid client, the function returns null.
+ * @param cookieString HTTP Request cookies separated by newline character.
+ * @param request HTTP Request
+ * @return Client Username if cookieString has a HS2 Generated cookie that is currently valid.
+ * Else, returns null.
+ * @throws UnsupportedEncodingException
+ */
+ private String getClientNameFromCookie(String cookieString, HttpServletRequest request) throws UnsupportedEncodingException {
+ StringTokenizer cookieParser = new StringTokenizer(cookieString, ";\n");
+ String nvp, cName, cValue;
+ int l;
+
+ while (cookieParser.hasMoreTokens()) {
+ nvp = cookieParser.nextToken();
+ if (nvp.charAt(0) == ' ') {
+ nvp = new String(nvp.substring(1));
+ }
+ l = nvp.indexOf('=');
+ if (l == -1) {
+ continue;
+ }
+ //Get the actual cookie value
+ cName = new String(nvp.substring(0, l));
+ if (cName.equals(AUTH_COOKIE)) {
+ int nvpLen = nvp.length();
+ cValue = nvp.substring(l+1, nvpLen);
+ cValue = cValue.replaceAll("\"", "");
+ } else {
+ continue;
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("cValue " + cValue);
+ }
+ // If the key matches AUTH_COOKIE, validate the value.
+ if (cName.equals(AUTH_COOKIE)) {
+ cValue = signer.verifyAndExtract(cValue);
+ if (cValue != null) {
+ CookieData cookieData = (new ServiceUtils()).parseCookieToken(cValue);
+
+ if (cookieData == null) {
+ LOG.warn("Invalid cookie token " + cValue);
+ continue;
+ }
+ if (!cookieData.ipAddress.equals(request.getRemoteAddr())) {
+ LOG.warn("Invalid cookie ip address " + cookieData.ipAddress +
+ " , expected :" + request.getRemoteAddr());
+ continue;
+ }
+ //We have found a valid cookie in the client request.
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Validated the cookie for user " + cookieData.userName);
+ }
+ return cookieData.userName;
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Validate the request cookie. This function iterates over the request cookie headers
+ * and finds a cookie that represents a valid client/server session. If it finds one, it
+ * returns the client name associated with the session. Else, it returns null.
+ * @param request The HTTP Servlet Request send by the client
+ * @return Client Username if the request has a valid HS2 cookie, else returns null
+ * @throws UnsupportedEncodingException
+ */
+ private String validateCookie(HttpServletRequest request) throws UnsupportedEncodingException {
+ String cookieString;
+
+ // Find all the valid cookies associated with the request.
+ cookieString = request.getHeader("COOKIE");
+ if (cookieString == null) {
+ cookieString = request.getHeader("Cookie");
+ }
+ if (cookieString == null) {
+ cookieString = request.getHeader("cookie");
+ }
+ if (cookieString == null) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("No valid cookies associated with the request " + request);
+ }
+ return null;
+ }
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Received cookie headers " + cookieString);
+ }
+ return getClientNameFromCookie(cookieString, request);
+ }
+
+
+ /**
+ * Generate a server side cookie given the cookie value as the input.
+ * @param str Input string token.
+ * @return The generated cookie.
+ * @throws UnsupportedEncodingException
+ */
+ private Cookie createCookie(String str) throws UnsupportedEncodingException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Cookie name = " + AUTH_COOKIE + " value = " + str);
+ }
+ Cookie cookie = new Cookie(AUTH_COOKIE, str);
+
+ cookie.setMaxAge(cookieMaxAge);
+ if (cookieDomain != null) {
+ cookie.setDomain(cookieDomain);
+ }
+ if (cookiePath != null) {
+ cookie.setPath(cookiePath);
+ }
+ return cookie;
+ }
+
+
/**
* Do the LDAP/PAM authentication
* @param request