diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index 13fc19b..a1b0df6 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -18,8 +18,12 @@ package org.apache.hive.jdbc; +import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION; + import java.io.FileInputStream; +import java.io.IOException; import java.security.KeyStore; +import java.security.PrivilegedExceptionAction; import java.sql.Array; import java.sql.Blob; import java.sql.CallableStatement; @@ -50,11 +54,19 @@ import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.security.SaslRpcServer; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.service.auth.HiveAuthFactory; +import org.apache.hive.service.auth.HttpAuthHelper; +import org.apache.hive.service.auth.HttpAuthenticationException; import org.apache.hive.service.auth.KerberosSaslHelper; import org.apache.hive.service.auth.PlainSaslHelper; import org.apache.hive.service.auth.SaslQOP; @@ -65,16 +77,29 @@ import org.apache.hive.service.cli.thrift.TOpenSessionResp; import org.apache.hive.service.cli.thrift.TProtocolVersion; import org.apache.hive.service.cli.thrift.TSessionHandle; +import org.apache.http.Header; import org.apache.http.HttpRequestInterceptor; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; 
import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.conn.ssl.SSLContexts; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; +import org.apache.http.message.BufferedHeader; +import org.apache.http.protocol.BasicHttpContext; +import org.apache.http.protocol.HttpContext; +import org.apache.http.util.CharArrayBuffer; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.transport.THttpClient; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; +import org.ietf.jgss.GSSContext; +import org.ietf.jgss.GSSException; +import org.ietf.jgss.GSSManager; +import org.ietf.jgss.GSSName; +import org.ietf.jgss.Oid; /** * HiveConnection. @@ -189,26 +214,19 @@ private void openTransport() throws SQLException { private TTransport createHttpTransport() throws SQLException { CloseableHttpClient httpClient; - // http path should begin with "/" - String httpPath; - httpPath = hiveConfMap.get( - HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname); - if(httpPath == null) { - httpPath = "/"; - } - if(!httpPath.startsWith("/")) { - httpPath = "/" + httpPath; - } boolean useSsl = "true".equalsIgnoreCase(sessConfMap.get(HIVE_USE_SSL)); + String httpUrl = getServerHttpUrl(useSsl); // Create an http client from the configs - httpClient = getHttpClient(useSsl); + try { + httpClient = getHttpClient(useSsl); + } catch (Exception e) { + String msg = "Could not create http connection to " + + jdbcURI + ". " + e.getMessage(); + throw new SQLException(msg, " 08S01", e); + } - // Create the http/https url - // JDBC driver will set up an https url if ssl is enabled, otherwise http - String schemeName = useSsl ? 
"https" : "http"; - String httpUrl = schemeName + "://" + host + ":" + port + httpPath; try { transport = new THttpClient(httpUrl, httpClient); } @@ -220,42 +238,258 @@ private TTransport createHttpTransport() throws SQLException { return transport; } + private String getServerHttpUrl(boolean useSsl) { + // Create the http/https url + // JDBC driver will set up an https url if ssl is enabled, otherwise http + String schemeName = useSsl ? "https" : "http"; + // http path should begin with "/" + String httpPath; + httpPath = hiveConfMap.get( + HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH.varname); + if(httpPath == null) { + httpPath = "/"; + } + if(!httpPath.startsWith("/")) { + httpPath = "/" + httpPath; + } + return schemeName + "://" + host + ":" + port + httpPath; + } + + private String getServerHttpUrl() { + return getServerHttpUrl(false); + } + private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException { - // Add an interceptor to pass username/password in the header - // for basic preemtive http authentication at the server - // In https mode, the entire information is encrypted - HttpRequestInterceptor authInterceptor = new HttpBasicAuthInterceptor( - getUserName(), getPasswd()); - if (useSsl) { - String sslTrustStorePath = sessConfMap.get(HIVE_SSL_TRUST_STORE); - String sslTrustStorePassword = sessConfMap.get( - HIVE_SSL_TRUST_STORE_PASSWORD); - KeyStore sslTrustStore; - SSLContext sslContext; - if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) { - // Create a default client context based on standard JSSE trust material - sslContext = SSLContexts.createDefault(); - } else { - // Pick trust store config from the given path - try { - sslTrustStore = KeyStore.getInstance(HIVE_SSL_TRUST_STORE_TYPE); - sslTrustStore.load(new FileInputStream(sslTrustStorePath), - sslTrustStorePassword.toCharArray()); - sslContext = SSLContexts.custom().loadTrustMaterial( - sslTrustStore).build(); + // If Kerberos + if 
(!HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE)) + && sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) { + try { + /** + * Do kerberos authentication and get the final service ticket, + * for sending to the server, + * Add an interceptor which sets the appropriate header to the request + */ + String tokenString = doKerberosAuth(); + HttpKerberosRequestInterceptor kerberosInterceptor = new HttpKerberosRequestInterceptor(tokenString); + return HttpClients.custom().addInterceptorFirst(kerberosInterceptor).build(); + } catch (Exception e) { + String msg = "Could not create a kerberized http connection to " + + jdbcURI + ". " + e.getMessage(); + throw new SQLException(msg, " 08S01", e); + } + } + else { + /** + * Add an interceptor to pass username/password in the header, + * for basic preemptive http authentication at the server. + * In https mode, the entire information is encrypted + */ + HttpRequestInterceptor authInterceptor = new HttpBasicAuthInterceptor( + getUserName(), getPasswd()); + if (useSsl) { + String sslTrustStorePath = sessConfMap.get(HIVE_SSL_TRUST_STORE); + String sslTrustStorePassword = sessConfMap.get( + HIVE_SSL_TRUST_STORE_PASSWORD); + KeyStore sslTrustStore; + SSLContext sslContext; + if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) { + // Create a default client context based on standard JSSE trust material + sslContext = SSLContexts.createDefault(); + } else { + // Pick trust store config from the given path + try { + sslTrustStore = KeyStore.getInstance(HIVE_SSL_TRUST_STORE_TYPE); + sslTrustStore.load(new FileInputStream(sslTrustStorePath), + sslTrustStorePassword.toCharArray()); + sslContext = SSLContexts.custom().loadTrustMaterial( + sslTrustStore).build(); + } + catch (Exception e) { + String msg = "Could not create an https connection to " + + jdbcURI + ". 
" + e.getMessage(); + throw new SQLException(msg, " 08S01", e); + } + } + return HttpClients.custom().setHostnameVerifier(SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER).setSslcontext( + sslContext).addInterceptorFirst(authInterceptor).build(); + } + else { + // Create a plain http client + return HttpClients.custom().addInterceptorFirst(authInterceptor).build(); + } + } + } + + /** + * + * @return Base64 encoded service ticket token on success + * @throws GSSException + * @throws IOException + * @throws InterruptedException + */ + private String doKerberosAuth() throws GSSException, IOException, InterruptedException { + UserGroupInformation clientUGI = getClientUGI(); + String serverPrincipal = getServerPrincipal(); + // Uses the Ticket Granting Ticket in the UserGroupInformation + return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal)); + } + + /** + * JAAS login to setup the client UserGroupInformation. + * Sets up the kerberos Ticket Granting Ticket, + * in the client UserGroupInformation object + * @return Client's UserGroupInformation + * @throws IOException + */ + private UserGroupInformation getClientUGI() throws IOException { + // Init UserGroupInformation + Configuration conf = new Configuration(); + conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos"); + UserGroupInformation.setConfiguration(conf); + return UserGroupInformation.getCurrentUser(); + } + + /** + * Get server pricipal and verify that hostname is present + * @return + * @throws IOException + */ + private String getServerPrincipal() throws IOException { + String serverPrincipal = SecurityUtil.getServerPrincipal(sessConfMap.get(HIVE_AUTH_PRINCIPAL), host); + String names[] = SaslRpcServer.splitKerberosName(serverPrincipal); + if (names.length != 3) { + throw new IOException( + "Kerberos principal name does NOT have the expected hostname part: " + + serverPrincipal); + } + return serverPrincipal; + } + + /** + * + * HttpKerberosClientAction + * + */ + class 
HttpKerberosClientAction implements PrivilegedExceptionAction { + String serverPrincipal; + private final Base64 base64codec; + public static final String HTTP_RESPONSE = "HTTP_RESPONSE"; + public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL"; + private HttpContext httpContext; + + HttpKerberosClientAction(String serverPrincipal) { + this.serverPrincipal = serverPrincipal; + this.base64codec = new Base64(0); + this.httpContext = new BasicHttpContext(); + httpContext.setAttribute(SERVER_HTTP_URL, getServerHttpUrl()); + } + + @Override + public String run() throws Exception { + // This Oid for SPNEGO GSS-API mechanism. + Oid spnegoOid = new Oid("1.3.6.1.5.5.2"); + + GSSManager manager = GSSManager.getInstance(); + + // Create a GSSName out of the server's name. + GSSName serverName = manager.createName(serverPrincipal, + GSSName.NT_HOSTBASED_SERVICE, spnegoOid); + + /* + * Create a GSSContext for mutual authentication with the + * server. + * - serverName is the GSSName that represents the server. + * - spnegoOid is the Oid that represents the mechanism to + * use. The client chooses the mechanism to use. + * - null is passed in for client credentials + * - DEFAULT_LIFETIME lets the mechanism decide how long the + * context can remain valid. + * Note: Passing in null for the credentials asks GSS-API to + * use the default credentials. This means that the mechanism + * will look among the credentials stored in the current Subject (UserGroupInformation) + * to find the right kind of credentials that it needs. 
+ */ + GSSContext gssContext = manager.createContext(serverName, + spnegoOid, + null, + GSSContext.DEFAULT_LIFETIME); + + // Mutual authentication not required + gssContext.requestMutualAuth(false); + + // Establish context + byte[] inToken = new byte[0]; + byte[] outToken; + while (!gssContext.isEstablished()) { + outToken = gssContext.initSecContext(inToken, 0, inToken.length); + if (outToken != null) { + String negotiationStatus = HttpAuthHelper.COMPLETE; + if(!gssContext.isEstablished()) { + negotiationStatus = HttpAuthHelper.NEGOTIATE; + } + sendHttpToken(outToken, httpContext, negotiationStatus); } - catch (Exception e) { - String msg = "Could not create an https connection to " + - jdbcURI + ". " + e.getMessage(); - throw new SQLException(msg, " 08S01", e); + if(!gssContext.isEstablished()) { + inToken = readHttpToken(httpContext); } } - return HttpClients.custom().setHostnameVerifier(SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER).setSslcontext( - sslContext).addInterceptorFirst(authInterceptor).build(); + return new String(base64codec.encode(inToken)); } - else { - // Create a plain http client - return HttpClients.custom().addInterceptorFirst(authInterceptor).build(); + + /** + * Sends the token generated in the kerberos client sequence to the server + * The token is sent in the following header line: + * Authorization: Negotiate + * Gets the response from the server and populates the httpContext with it. 
+ * @throws IOException + * @throws ClientProtocolException + */ + private void sendHttpToken(byte[] outToken, HttpContext httpContext, + String negotiationStatus) + throws HttpAuthenticationException { + CloseableHttpClient httpClient = HttpClients.createDefault(); + HttpPost httpPost = new HttpPost((String) httpContext.getAttribute(SERVER_HTTP_URL)); + String tokenString = new String(base64codec.encode(outToken)); + CharArrayBuffer buffer = new CharArrayBuffer(32); + buffer.append(HttpAuthHelper.AUTHORIZATION); + buffer.append(": " + negotiationStatus); + buffer.append(tokenString); + httpPost.addHeader(new BufferedHeader(buffer)); + CloseableHttpResponse httpResponse; + try { + httpResponse = httpClient.execute(httpPost, httpContext); + } catch (Exception e) { + throw new HttpAuthenticationException("Authentication error " + + "during Kerberos negotiation: ", e); + } + httpContext.setAttribute(HTTP_RESPONSE, httpResponse); + } + + /** + * Reads and decodes the token received from the server response + * Expects the response to be either HTTP 200 or HTTP 401 + * @throws HttpAuthenticationException + */ + private byte[] readHttpToken(HttpContext httpContext) throws HttpAuthenticationException { + CloseableHttpResponse httpResponse = (CloseableHttpResponse) httpContext.getAttribute(HTTP_RESPONSE); + int status = httpResponse.getStatusLine().getStatusCode(); + + if (status == HttpStatus.SC_OK || status == HttpStatus.SC_UNAUTHORIZED) { + Header authHeader = httpResponse.getFirstHeader(HttpAuthHelper.WWW_AUTHENTICATE); + if (authHeader == null) { + throw new HttpAuthenticationException("Invalid HTTP Kerberos authentication " + + "header received from the server"); + } + String tokenString = authHeader.getValue(); + if (tokenString == null) { + throw new HttpAuthenticationException("Invalid HTTP Kerberos authentication " + + "header received from the server"); + } + return base64codec.decode(tokenString); + } + // Throw exception if response status is neither 200 nor 
401 + throw new HttpAuthenticationException("Invalid HTTP Kerberos authentication " + + "header received from the server"); } } @@ -317,7 +551,6 @@ private TTransport createBinaryTransport() throws SQLException { return transport; } - private boolean isHttpTransportMode() { String transportMode = hiveConfMap.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname); diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java index 66eba1b..d7e47d1 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java @@ -25,6 +25,7 @@ import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.impl.auth.AuthSchemeBase; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.protocol.HttpContext; @@ -34,20 +35,20 @@ * */ public class HttpBasicAuthInterceptor implements HttpRequestInterceptor { + UsernamePasswordCredentials credentials; + AuthSchemeBase authScheme; - Header basicAuthHeader; - public HttpBasicAuthInterceptor(String username, String password){ + public HttpBasicAuthInterceptor(String username, String password) { if(username != null){ - UsernamePasswordCredentials creds = new UsernamePasswordCredentials(username, password); - basicAuthHeader = BasicScheme.authenticate(creds, "UTF-8", false); + credentials = new UsernamePasswordCredentials(username, password); } + authScheme = new BasicScheme(); } @Override public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException { - if(basicAuthHeader != null){ - httpRequest.addHeader(basicAuthHeader); - } + Header basicAuthHeader = authScheme.authenticate(credentials, httpRequest, httpContext); + httpRequest.addHeader(basicAuthHeader); } } diff --git 
a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java new file mode 100644 index 0000000..411e512 --- /dev/null +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hive.jdbc; + +import java.io.IOException; + +import org.apache.hive.service.auth.HttpAuthHelper; +import org.apache.http.HttpException; +import org.apache.http.HttpRequest; +import org.apache.http.HttpRequestInterceptor; +import org.apache.http.message.BufferedHeader; +import org.apache.http.protocol.HttpContext; +import org.apache.http.util.CharArrayBuffer; + +public class HttpKerberosRequestInterceptor implements HttpRequestInterceptor { + + String tokenString; + + public HttpKerberosRequestInterceptor(String tokenString) { + this.tokenString = tokenString; + } + + @Override + public void process(HttpRequest httpRequest, HttpContext httpContext) throws HttpException, IOException { + // Set the previously authorized session key token (Base64 encoded) in the header + CharArrayBuffer buffer = new CharArrayBuffer(32); + buffer.append(HttpAuthHelper.AUTHORIZATION); + buffer.append(": " + HttpAuthHelper.COMPLETE); + buffer.append(tokenString); + httpRequest.addHeader(new BufferedHeader(buffer)); + } + +} diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index d8ba3aa..44fb3ad 100644 --- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -53,7 +53,7 @@ KERBEROS("KERBEROS"), CUSTOM("CUSTOM"); - private String authType; // Auth type for SASL + private String authType; AuthTypes(String authType) { this.authType = authType; @@ -62,42 +62,50 @@ public String getAuthName() { return authType; } - }; private HadoopThriftAuthBridge.Server saslServer = null; private String authTypeStr; + private String transportMode; HiveConf conf; public HiveAuthFactory() throws TTransportException { conf = new HiveConf(); - + transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE); authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION); - if 
(authTypeStr == null) { - authTypeStr = AuthTypes.NONE.getAuthName(); + + // In http mode we use NOSASL as the default auth type + if (transportMode.equalsIgnoreCase("http")) { + if (authTypeStr == null) { + authTypeStr = AuthTypes.NOSASL.getAuthName(); + } } - if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName()) - && ShimLoader.getHadoopShims().isSecureShimImpl()) { - saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer( - conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB), - conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL) - ); + else { + if (authTypeStr == null) { + authTypeStr = AuthTypes.NONE.getAuthName(); + } + if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName()) + && ShimLoader.getHadoopShims().isSecureShimImpl()) { + saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer( + conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB), + conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)); + } } } public Map getSaslProperties() { Map saslProps = new HashMap(); SaslQOP saslQOP = - SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP)); + SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP)); // hadoop.rpc.protection being set to a higher level than hive.server2.thrift.rpc.protection // does not make sense in most situations. Log warning message in such cases. Map hadoopSaslProps = ShimLoader.getHadoopThriftAuthBridge(). 
- getHadoopSaslProperties(conf); + getHadoopSaslProperties(conf); SaslQOP hadoopSaslQOP = SaslQOP.fromString(hadoopSaslProps.get(Sasl.QOP)); if(hadoopSaslQOP.ordinal() > saslQOP.ordinal()) { LOG.warn(MessageFormat.format("\"hadoop.rpc.protection\" is set to higher security level " + - "{0} then {1} which is set to {2}", hadoopSaslQOP.toString(), - ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname, saslQOP.toString())); + "{0} then {1} which is set to {2}", hadoopSaslQOP.toString(), + ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP.varname, saslQOP.toString())); } saslProps.put(Sasl.QOP, saslQOP.toString()); saslProps.put(Sasl.SERVER_AUTH, "true"); @@ -105,9 +113,7 @@ public HiveAuthFactory() throws TTransportException { } public TTransportFactory getAuthTransFactory() throws LoginException { - TTransportFactory transportFactory; - if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) { try { transportFactory = saslServer.createTransportFactory(getSaslProperties()); @@ -130,10 +136,15 @@ public TTransportFactory getAuthTransFactory() throws LoginException { public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException { - if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) { - return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service); - } else { - return PlainSaslHelper.getPlainProcessorFactory(service); + if (transportMode.equalsIgnoreCase("http")) { + return HttpAuthHelper.getAuthProcFactory(service); + } + else { + if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) { + return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service); + } else { + return PlainSaslHelper.getPlainProcessorFactory(service); + } } } @@ -145,14 +156,11 @@ public String getRemoteUser() { } } - /* perform kerberos login using the hadoop shim API if the configuration is available */ + // Perform kerberos login using the hadoop shim API if the configuration is available public static void 
loginFromKeytab(HiveConf hiveConf) throws IOException { String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL); String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB); - if (principal.isEmpty() && keyTabFile.isEmpty()) { - // no security configuration available - return; - } else if (!principal.isEmpty() && !keyTabFile.isEmpty()) { + if (!principal.isEmpty() && !keyTabFile.isEmpty()) { ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile); } else { throw new IOException ("HiveServer2 kerberos principal or keytab is not correctly configured"); diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthHelper.java b/service/src/java/org/apache/hive/service/auth/HttpAuthHelper.java new file mode 100644 index 0000000..a34f032 --- /dev/null +++ b/service/src/java/org/apache/hive/service/auth/HttpAuthHelper.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package org.apache.hive.service.auth; + +import org.apache.hive.service.cli.thrift.TCLIService; +import org.apache.hive.service.cli.thrift.TCLIService.Iface; +import org.apache.hive.service.cli.thrift.ThriftCLIService; +import org.apache.thrift.TProcessor; +import org.apache.thrift.TProcessorFactory; +import org.apache.thrift.transport.TTransport; + +public class HttpAuthHelper { + + public static final String WWW_AUTHENTICATE = "WWW-Authenticate"; + public static final String AUTHORIZATION = "Authorization"; + public static final String NEGOTIATE = "Negotiate"; + public static final String COMPLETE = "Complete"; + + private static class HttpCLIServiceProcessorFactory extends TProcessorFactory { + private final ThriftCLIService service; + + public HttpCLIServiceProcessorFactory(ThriftCLIService service) { + super(null); + this.service = service; + } + + @Override + public TProcessor getProcessor(TTransport trans) { + TProcessor processor = new TCLIService.Processor(service); + return processor; + } + } + + public static TProcessorFactory getAuthProcFactory(ThriftCLIService service) { + return new HttpCLIServiceProcessorFactory(service); + } + +} diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java b/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java new file mode 100644 index 0000000..3c9bb04 --- /dev/null +++ b/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java @@ -0,0 +1,42 @@ + +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. See accompanying LICENSE file. + */ +package org.apache.hive.service.auth; + +public class HttpAuthenticationException extends Exception { + static final long serialVersionUID = 0; + + /** + * @param cause original exception. + */ + public HttpAuthenticationException(Throwable cause) { + super(cause); + } + + /** + * @param msg exception message. + */ + public HttpAuthenticationException(String msg) { + super(msg); + } + + /** + * @param msg exception message. + * @param cause original exception. + */ + public HttpAuthenticationException(String msg, Throwable cause) { + super(msg, cause); + } + +} diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java index 56b357a..c88ec79 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIService.java +++ b/service/src/java/org/apache/hive/service/cli/CLIService.java @@ -30,11 +30,13 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.service.CompositeService; import org.apache.hive.service.ServiceException; import org.apache.hive.service.auth.HiveAuthFactory; @@ -59,7 +61,7 @@ private HiveConf hiveConf; private SessionManager sessionManager; private IMetaStoreClient metastoreClient; - private String serverUserName = null; + private UserGroupInformation serviceUGI; public CLIService() { @@ -72,16 +74,25 @@ public synchronized void init(HiveConf hiveConf) 
{ sessionManager = new SessionManager(); addService(sessionManager); - try { - HiveAuthFactory.loginFromKeytab(hiveConf); - serverUserName = ShimLoader.getHadoopShims(). - getShortUserName(ShimLoader.getHadoopShims().getUGIForConf(hiveConf)); - } catch (IOException e) { - throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); - } catch (LoginException e) { - throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); + /** + * If auth mode is Kerberos, do a kerberos login for the service from the keytab + */ + if (hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION) + .equalsIgnoreCase(HiveAuthFactory.AuthTypes.KERBEROS.toString())) { + try { + HiveAuthFactory.loginFromKeytab(hiveConf); + this.serviceUGI = ShimLoader.getHadoopShims().getUGIForConf(hiveConf); + } catch (IOException e) { + throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); + } catch (LoginException e) { + throw new ServiceException("Unable to login to kerberos with given principal/keytab", e); + } + super.init(hiveConf); } - super.init(hiveConf); + } + + public UserGroupInformation getServiceUGI() { + return this.serviceUGI; } @Override @@ -123,7 +134,7 @@ public SessionHandle openSession(TProtocolVersion protocol, String username, Str public SessionHandle openSessionWithImpersonation(TProtocolVersion protocol, String username, String password, Map configuration, String delegationToken) - throws HiveSQLException { + throws HiveSQLException { SessionHandle sessionHandle = sessionManager.openSession(protocol, username, password, configuration, true, delegationToken); LOG.debug(sessionHandle + ": openSession()"); @@ -146,9 +157,9 @@ public SessionHandle openSession(String username, String password, Map configuration, - String delegationToken) throws HiveSQLException { + String delegationToken) throws HiveSQLException { SessionHandle sessionHandle = sessionManager.openSession(SERVER_VERSION, username, 
password, configuration, - true, delegationToken); + true, delegationToken); LOG.debug(sessionHandle + ": openSession()"); return sessionHandle; } @@ -310,7 +321,7 @@ public OperationStatus getOperationStatus(OperationHandle opHandle) public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { sessionManager.getOperationManager().getOperation(opHandle) - .getParentSession().cancelOperation(opHandle); + .getParentSession().cancelOperation(opHandle); LOG.debug(opHandle + ": cancelOperation()"); } @@ -321,7 +332,7 @@ public void cancelOperation(OperationHandle opHandle) public void closeOperation(OperationHandle opHandle) throws HiveSQLException { sessionManager.getOperationManager().getOperation(opHandle) - .getParentSession().closeOperation(opHandle); + .getParentSession().closeOperation(opHandle); LOG.debug(opHandle + ": closeOperation"); } diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index 6fbc847..4b8cf78 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -40,7 +40,7 @@ public ThriftBinaryCLIService(CLIService cliService) { public void run() { try { hiveAuthFactory = new HiveAuthFactory(); - TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory(); + TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory(); TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this); String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT"); diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index a6ff6ce..b00553a 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ 
b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -20,9 +20,13 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Shell; +import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes; import org.apache.hive.service.cli.CLIService; +import org.apache.thrift.TProcessor; +import org.apache.thrift.TProcessorFactory; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TProtocolFactory; import org.apache.thrift.server.TServlet; @@ -59,32 +63,20 @@ public void run() { minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS); maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS); - String httpPath = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH); - // The config parameter can be like "path", "/path", "/path/", "path/*", "/path1/path2/*" and so on. 
- // httpPath should end up as "/*", "/path/*" or "/path1/../pathN/*" - if(httpPath == null || httpPath.equals("")) { - httpPath = "/*"; - } - else { - if(!httpPath.startsWith("/")) { - httpPath = "/" + httpPath; - } - if(httpPath.endsWith("/")) { - httpPath = httpPath + "*"; - } - if(!httpPath.endsWith("/*")) { - httpPath = httpPath + "/*"; - } - } + String httpPath = getHttpPath(hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH)); httpServer = new org.eclipse.jetty.server.Server(); QueuedThreadPool threadPool = new QueuedThreadPool(); threadPool.setMinThreads(minWorkerThreads); threadPool.setMaxThreads(maxWorkerThreads); httpServer.setThreadPool(threadPool); - SelectChannelConnector connector; - Boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL); + + SelectChannelConnector connector = new SelectChannelConnector();; + boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL); String schemeName = useSsl ? "https" : "http"; + String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION); + // Set during the init phase if auth mode is kerberos + UserGroupInformation serviceUGI = cliService.getServiceUGI(); if (useSsl) { String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim(); @@ -97,8 +89,6 @@ public void run() { sslContextFactory.setKeyStorePath(keyStorePath); sslContextFactory.setKeyStorePassword(keyStorePassword); connector = new SslSelectChannelConnector(sslContextFactory); - } else { - connector = new SelectChannelConnector(); } connector.setPort(portNum); @@ -106,14 +96,17 @@ public void run() { connector.setReuseAddress(!Shell.WINDOWS); httpServer.addConnector(connector); - TCLIService.Processor processor = - new TCLIService.Processor(new EmbeddedThriftBinaryCLIService()); + hiveAuthFactory = new HiveAuthFactory(); + TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this); + TProcessor processor = processorFactory.getProcessor(null); TProtocolFactory 
protocolFactory = new TBinaryProtocol.Factory(); - TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory); + TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, + authType, serviceUGI); final ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); + httpServer.setHandler(context); context.addServlet(new ServletHolder(thriftHttpServlet), httpPath); @@ -130,15 +123,38 @@ public void run() { } /** + * The config parameter can be like "path", "/path", "/path/", "path/*", "/path1/path2/*" and so on. + * httpPath should end up as "/*", "/path/*" or "/path1/../pathN/*" + * @param httpPath + * @return + */ + private String getHttpPath(String httpPath) { + if(httpPath == null || httpPath.equals("")) { + httpPath = "/*"; + } + else { + if(!httpPath.startsWith("/")) { + httpPath = "/" + httpPath; + } + if(httpPath.endsWith("/")) { + httpPath = httpPath + "*"; + } + if(!httpPath.endsWith("/*")) { + httpPath = httpPath + "/*"; + } + } + return httpPath; + } + + /** * Verify that this configuration is supported by transportMode of HTTP * @param hiveConf */ private static void verifyHttpConfiguration(HiveConf hiveConf) { String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION); - // error out if KERBEROS or LDAP mode is being used, it is not supported - if(authType.equalsIgnoreCase(AuthTypes.KERBEROS.toString()) || - authType.equalsIgnoreCase(AuthTypes.LDAP.toString()) || + // error out if LDAP mode is being used, it is not supported + if(authType.equalsIgnoreCase(AuthTypes.LDAP.toString()) || authType.equalsIgnoreCase(AuthTypes.CUSTOM.toString())) { String msg = ConfVars.HIVE_SERVER2_AUTHENTICATION + " setting of " + authType + " is currently not supported with " + diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index e77f043..a503011 
100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -19,6 +19,7 @@ package org.apache.hive.service.cli.thrift; import java.io.IOException; +import java.security.PrivilegedExceptionAction; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -28,26 +29,178 @@ import org.apache.commons.codec.binary.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hive.service.auth.HiveAuthFactory.AuthTypes; +import org.apache.hive.service.auth.HttpAuthHelper; +import org.apache.hive.service.auth.HttpAuthenticationException; import org.apache.thrift.TProcessor; import org.apache.thrift.protocol.TProtocolFactory; import org.apache.thrift.server.TServlet; +import org.ietf.jgss.GSSContext; +import org.ietf.jgss.GSSCredential; +import org.ietf.jgss.GSSException; +import org.ietf.jgss.GSSManager; +import org.ietf.jgss.Oid; +/** + * + * ThriftHttpServlet + * + */ public class ThriftHttpServlet extends TServlet { private static final long serialVersionUID = 1L; public static final Log LOG = LogFactory.getLog(ThriftHttpServlet.class.getName()); + private String authType; + private UserGroupInformation serviceUGI; + public static final String AUTHORIZATION = "Authorization"; - public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory) { + public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory, + String authType, UserGroupInformation serviceUGI) { super(processor, protocolFactory); + this.authType = authType; + this.serviceUGI = serviceUGI; } @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - logRequestHeader(request); - super.doPost(request, response); + /** + * For a kerberos setup + */ + 
if(authType.equalsIgnoreCase(AuthTypes.KERBEROS.toString())) { + try { + String authHeader = verifyAndReturnAuthHeader(request); + doKerberosAuth(request, response, isClientContextComplete(authHeader)); + /** + * If the client's context was established, + * and kerberos authentication did not throw any exception, + * we can pass the request to the application. + */ + if (isClientContextComplete(authHeader)) { + super.doPost(request, response); + } + else { + response.getWriter().println("Kerberos negotiation response"); + } + } + catch (HttpAuthenticationException e) { + // Send a 403 to the client + LOG.error("Error: ", e); + response.setContentType("application/x-thrift"); + response.setStatus(HttpServletResponse.SC_FORBIDDEN); + // Send the response back to the client + response.getWriter().println("Authentication Error: " + e.getMessage()); + } + } + else { + logRequestHeader(request); + super.doPost(request, response); + } + } + + /** + * Each kerberos request is supposed to have an Authorization header field. + * @param request + * @return + * @throws HttpAuthenticationException + */ + private String verifyAndReturnAuthHeader(HttpServletRequest request) + throws HttpAuthenticationException { + String authHeader = request.getHeader(HttpAuthHelper.AUTHORIZATION); + if (authHeader == null) { + throw new HttpAuthenticationException("Request contains no Authorization header."); + } + return authHeader; } + private boolean isClientContextComplete(String authHeader) { + if (authHeader.toLowerCase().contains(HttpAuthHelper.COMPLETE.toLowerCase())) { + return true; + } + return false; + } + + /** + * Do the GSS-API kerberos authentication. + * We already have a logged in subject in the form of serviceUGI, + * which GSS-API will extract information from.
+ * @param request + * @param response + * @param isClientContextComplete + * @return + * @throws HttpAuthenticationException + */ + private Void doKerberosAuth(HttpServletRequest request, + HttpServletResponse response, boolean isClientContextComplete) throws HttpAuthenticationException { + try { + return serviceUGI.doAs(new HttpKerberosServerAction(request, response, isClientContextComplete)); + } catch (Exception e) { + throw new HttpAuthenticationException(e); + } + } + + class HttpKerberosServerAction implements PrivilegedExceptionAction { + HttpServletRequest request; + HttpServletResponse response; + boolean isClientContextComplete; + + HttpKerberosServerAction(HttpServletRequest request, + HttpServletResponse response, boolean isClientContextComplete) { + this.request = request; + this.response = response; + this.isClientContextComplete = isClientContextComplete; + } + + @Override + public Void run() throws HttpAuthenticationException { + // Get own Kerberos credentials for accepting connection + GSSManager manager = GSSManager.getInstance(); + + try { + Oid spnegoOid = new Oid("1.3.6.1.5.5.2"); + GSSCredential serverCreds = manager.createCredential(null, + GSSCredential.DEFAULT_LIFETIME, spnegoOid, GSSCredential.ACCEPT_ONLY); + + // Create a GSS context + GSSContext gssContext = manager.createContext(serverCreds); + + // Check for Authorization header + String outTokenString = request.getHeader(HttpAuthHelper.AUTHORIZATION). 
+ substring(HttpAuthHelper.COMPLETE.length()).trim(); + Base64 base64 = new Base64(0); + byte[] inToken = base64.decode(outTokenString); + byte[] outToken = gssContext.acceptSecContext(inToken, 0, inToken.length); + + // If client context was established + if (isClientContextComplete) { + if (!gssContext.isEstablished()) { + throw new HttpAuthenticationException("Kerberos authentication failed: " + + "unable to establish context."); + } + } + // If client context was not established + else { + outTokenString = new String(base64.encode(outToken)); + response.setStatus(HttpServletResponse.SC_OK); + response.setContentType("application/x-thrift"); + if (!gssContext.isEstablished()) { + // Set response status and header + response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); + } + } + response.setHeader(HttpAuthHelper.WWW_AUTHENTICATE, outTokenString); + } catch (GSSException e) { + throw new HttpAuthenticationException("Kerberos authentication failed: ", e); + } + return null; + } + } + + /** + * Request header is Base64 encoded + * @param request + */ protected void logRequestHeader(HttpServletRequest request) { String authHeaderBase64 = request.getHeader("Authorization"); if(authHeaderBase64 == null) { @@ -83,6 +236,5 @@ else if(LOG.isDebugEnabled()) { } } } - }