diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index ae7cd53..6246a01 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1526,8 +1526,8 @@
"must be a proper implementation of the interface\n" +
"org.apache.hive.service.auth.PasswdAuthenticationProvider. HiveServer2\n" +
"will call its Authenticate(user, passed) method to authenticate requests.\n" +
- "The implementation may optionally extend Hadoop's\n" +
- "org.apache.hadoop.conf.Configured class to grab Hive's Configuration object."),
+ "The implementation may optionally implement Hadoop's\n" +
+ "org.apache.hadoop.conf.Configurable interface to grab Hive's Configuration object."),
HIVE_SERVER2_PAM_SERVICES("hive.server2.authentication.pam.services", null,
"List of the underlying pam services that should be used when auth type is PAM\n" +
"A file with the same name must exist in /etc/pam.d"),
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java
index ece54a8..030132c 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/auth/TestCustomAuthentication.java
@@ -18,7 +18,6 @@
package org.apache.hive.service.auth;
import junit.framework.Assert;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.server.HiveServer2;
import org.junit.AfterClass;
@@ -96,7 +95,7 @@ public void testCustomAuthentication() throws Exception {
System.out.println(">>> PASSED testCustomAuthentication");
}
- public static class SimpleAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+ public static class SimpleAuthenticationProviderImpl implements PasswordAuthenticationProvider {
private Map<String, String> userMap = new HashMap<String, String>();
@@ -109,7 +108,7 @@ private void init(){
}
@Override
- public void Authenticate(String user, String password) throws AuthenticationException {
+ public void authenticate(String user, String password) throws AuthenticationException {
if(!userMap.containsKey(user)){
throw new AuthenticationException("Invalid user : "+user);
diff --git a/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
index 4db0022..538743e 100644
--- a/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
+++ b/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
@@ -20,12 +20,14 @@
import javax.security.sasl.AuthenticationException;
-public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+/**
+ * This authentication provider allows any combination of username and password.
+ */
+public class AnonymousAuthenticationProviderImpl implements PasswordAuthenticationProvider {
@Override
- public void Authenticate(String user, String password) throws AuthenticationException {
+ public void authenticate(String user, String password) throws AuthenticationException {
// no-op authentication
- return;
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java b/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
index e51d4f4..be05e5c 100644
--- a/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
@@ -19,15 +19,18 @@
import javax.security.sasl.AuthenticationException;
-public class AuthenticationProviderFactory {
+/**
+ * This class helps select a {@link PasswordAuthenticationProvider} for a given {@code AuthMethod}.
+ */
+public final class AuthenticationProviderFactory {
- public static enum AuthMethods {
+ public enum AuthMethods {
LDAP("LDAP"),
PAM("PAM"),
CUSTOM("CUSTOM"),
NONE("NONE");
- String authMethod;
+ private final String authMethod;
AuthMethods(String authMethod) {
this.authMethod = authMethod;
@@ -37,7 +40,8 @@ public String getAuthMethod() {
return authMethod;
}
- public static AuthMethods getValidAuthMethod(String authMethodStr) throws AuthenticationException {
+ public static AuthMethods getValidAuthMethod(String authMethodStr)
+ throws AuthenticationException {
for (AuthMethods auth : AuthMethods.values()) {
if (authMethodStr.equals(auth.getAuthMethod())) {
return auth;
@@ -47,24 +51,20 @@ public static AuthMethods getValidAuthMethod(String authMethodStr) throws Authen
}
}
- private AuthenticationProviderFactory () {
+ private AuthenticationProviderFactory() {
}
- public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
- throws AuthenticationException {
- if (authMethod.equals(AuthMethods.LDAP)) {
+ public static PasswordAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
+ throws AuthenticationException {
+ if (authMethod == AuthMethods.LDAP) {
return new LdapAuthenticationProviderImpl();
- }
- else if (authMethod.equals(AuthMethods.PAM)) {
+ } else if (authMethod == AuthMethods.PAM) {
return new PamAuthenticationProviderImpl();
- }
- else if (authMethod.equals(AuthMethods.CUSTOM)) {
+ } else if (authMethod == AuthMethods.CUSTOM) {
return new CustomAuthenticationProviderImpl();
- }
- else if (authMethod.equals(AuthMethods.NONE)) {
+ } else if (authMethod == AuthMethods.NONE) {
return new AnonymousAuthenticationProviderImpl();
- }
- else {
+ } else {
throw new AuthenticationException("Unsupported authentication method");
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
index 7094b89..52e9288 100644
--- a/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
+++ b/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
@@ -22,27 +22,29 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.util.ReflectionUtils;
-public class CustomAuthenticationProviderImpl
- implements PasswdAuthenticationProvider {
+/**
+ * This authentication provider implements the {@code CUSTOM} authentication. It allows a {@link
+ * PasswordAuthenticationProvider} to be specified at configuration time which may additionally
+ * implement {@link org.apache.hadoop.conf.Configurable Configurable} to grab Hive's {@link
+ * org.apache.hadoop.conf.Configuration Configuration}.
+ */
+public class CustomAuthenticationProviderImpl implements PasswordAuthenticationProvider {
- Class<? extends PasswdAuthenticationProvider> customHandlerClass;
- PasswdAuthenticationProvider customProvider;
+ private final PasswordAuthenticationProvider customProvider;
@SuppressWarnings("unchecked")
- CustomAuthenticationProviderImpl () {
+ CustomAuthenticationProviderImpl() {
HiveConf conf = new HiveConf();
- this.customHandlerClass = (Class<? extends PasswdAuthenticationProvider>)
- conf.getClass(
- HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
- PasswdAuthenticationProvider.class);
- this.customProvider =
- ReflectionUtils.newInstance(this.customHandlerClass, conf);
+ Class<? extends PasswordAuthenticationProvider> customHandlerClass =
+ (Class<? extends PasswordAuthenticationProvider>) conf.getClass(
+ HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+ PasswordAuthenticationProvider.class);
+ customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
}
@Override
- public void Authenticate(String user, String password)
- throws AuthenticationException {
- this.customProvider.Authenticate(user, password);
+ public void authenticate(String user, String password) throws AuthenticationException {
+ customProvider.authenticate(user, password);
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 72b3e7e..a0f7667 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -23,7 +23,6 @@
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;
-
import javax.security.auth.login.LoginException;
import javax.security.sasl.Sasl;
@@ -41,13 +40,14 @@
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+/**
+ * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
+ * given configuration as well as helps with authenticating requests.
+ */
public class HiveAuthFactory {
- private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
- public static enum AuthTypes {
+ public enum AuthTypes {
NOSASL("NOSASL"),
NONE("NONE"),
LDAP("LDAP"),
@@ -55,7 +55,7 @@
CUSTOM("CUSTOM"),
PAM("PAM");
- private String authType;
+ private final String authType;
AuthTypes(String authType) {
this.authType = authType;
@@ -65,11 +65,11 @@ public String getAuthName() {
return authType;
}
- };
+ }
- private HadoopThriftAuthBridge.Server saslServer = null;
+ private HadoopThriftAuthBridge.Server saslServer;
private String authTypeStr;
- private String transportMode;
+ private final String transportMode;
private final HiveConf conf;
public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
@@ -81,21 +81,19 @@ public HiveAuthFactory(HiveConf conf) throws TTransportException {
authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
// In http mode we use NOSASL as the default auth type
- if (transportMode.equalsIgnoreCase("http")) {
+ if ("http".equalsIgnoreCase(transportMode)) {
if (authTypeStr == null) {
authTypeStr = AuthTypes.NOSASL.getAuthName();
}
- }
- else {
+ } else {
if (authTypeStr == null) {
authTypeStr = AuthTypes.NONE.getAuthName();
}
if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())
&& ShimLoader.getHadoopShims().isSecureShimImpl()) {
- saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(
- conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
- conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)
- );
+ saslServer = ShimLoader.getHadoopThriftAuthBridge()
+ .createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+ conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
// start delegation token manager
try {
saslServer.startDelegationTokenSecretManager(conf, null);
@@ -108,8 +106,7 @@ public HiveAuthFactory(HiveConf conf) throws TTransportException {
public Map<String, String> getSaslProperties() {
Map<String, String> saslProps = new HashMap<String, String>();
- SaslQOP saslQOP =
- SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+ SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
saslProps.put(Sasl.QOP, saslQOP.toString());
saslProps.put(Sasl.SERVER_AUTH, "true");
return saslProps;
@@ -139,12 +136,10 @@ public TTransportFactory getAuthTransFactory() throws LoginException {
return transportFactory;
}
- public TProcessorFactory getAuthProcFactory(ThriftCLIService service)
- throws LoginException {
- if (transportMode.equalsIgnoreCase("http")) {
+ public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
+ if ("http".equalsIgnoreCase(transportMode)) {
return HttpAuthUtils.getAuthProcFactory(service);
- }
- else {
+ } else {
if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
} else {
@@ -154,18 +149,14 @@ public TProcessorFactory getAuthProcFactory(ThriftCLIService service)
}
public String getRemoteUser() {
- if (saslServer != null) {
- return saslServer.getRemoteUser();
- } else {
- return null;
- }
+ return saslServer == null ? null : saslServer.getRemoteUser();
}
public String getIpAddress() {
- if(saslServer != null && saslServer.getRemoteAddress() != null) {
- return saslServer.getRemoteAddress().getHostAddress();
- } else {
+ if (saslServer == null || saslServer.getRemoteAddress() == null) {
return null;
+ } else {
+ return saslServer.getRemoteAddress().getHostAddress();
}
}
@@ -173,62 +164,58 @@ public String getIpAddress() {
public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
- if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
- ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
+ if (principal.isEmpty() || keyTabFile.isEmpty()) {
+ throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
} else {
- throw new IOException ("HiveServer2 kerberos principal or keytab " +
- "is not correctly configured");
+ ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
}
}
- // Perform spnego login using the hadoop shim API if the configuration is available
- public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(
- HiveConf hiveConf) throws IOException {
+ // Perform SPNEGO login using the hadoop shim API if the configuration is available
+ public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
+ throws IOException {
String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
- if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
- return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(
- principal, keyTabFile);
+ if (principal.isEmpty() || keyTabFile.isEmpty()) {
+ throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
} else {
- throw new IOException ("HiveServer2 SPNego principal or keytab " +
- "is not correctly configured");
+ return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(principal, keyTabFile);
}
}
- public static TTransport getSocketTransport(String host, int port, int loginTimeout)
- throws TTransportException {
+ public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
return new TSocket(host, port, loginTimeout);
}
public static TTransport getSSLSocket(String host, int port, int loginTimeout)
- throws TTransportException {
+ throws TTransportException {
return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
}
public static TTransport getSSLSocket(String host, int port, int loginTimeout,
- String trustStorePath, String trustStorePassWord) throws TTransportException {
+ String trustStorePath, String trustStorePassWord) throws TTransportException {
TSSLTransportFactory.TSSLTransportParameters params =
- new TSSLTransportFactory.TSSLTransportParameters();
+ new TSSLTransportFactory.TSSLTransportParameters();
params.setTrustStore(trustStorePath, trustStorePassWord);
params.requireClientAuth(true);
return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
}
public static TServerSocket getServerSocket(String hiveHost, int portNum)
- throws TTransportException {
- InetSocketAddress serverAddress = null;
- if (hiveHost != null && !hiveHost.isEmpty()) {
- serverAddress = new InetSocketAddress(hiveHost, portNum);
+ throws TTransportException {
+ InetSocketAddress serverAddress;
+ if (hiveHost == null || hiveHost.isEmpty()) {
+ serverAddress = new InetSocketAddress(portNum);
} else {
- serverAddress = new InetSocketAddress(portNum);
+ serverAddress = new InetSocketAddress(hiveHost, portNum);
}
- return new TServerSocket(serverAddress );
+ return new TServerSocket(serverAddress);
}
- public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
- String keyStorePath, String keyStorePassWord) throws TTransportException, UnknownHostException {
+ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
+ String keyStorePassWord) throws TTransportException, UnknownHostException {
TSSLTransportFactory.TSSLTransportParameters params =
- new TSSLTransportFactory.TSSLTransportParameters();
+ new TSSLTransportFactory.TSSLTransportParameters();
params.setKeyStore(keyStorePath, keyStorePassWord);
InetAddress serverAddress;
@@ -243,8 +230,7 @@ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
// retrieve delegation token for the given user
public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
if (saslServer == null) {
- throw new HiveSQLException(
- "Delegation token only supported over kerberos authentication");
+ throw new HiveSQLException("Delegation token only supported over kerberos authentication");
}
try {
@@ -263,8 +249,7 @@ public String getDelegationToken(String owner, String renewer) throws HiveSQLExc
// cancel given delegation token
public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
if (saslServer == null) {
- throw new HiveSQLException(
- "Delegation token only supported over kerberos authentication");
+ throw new HiveSQLException("Delegation token only supported over kerberos authentication");
}
try {
saslServer.cancelDelegationToken(delegationToken);
@@ -275,8 +260,7 @@ public void cancelDelegationToken(String delegationToken) throws HiveSQLExceptio
public void renewDelegationToken(String delegationToken) throws HiveSQLException {
if (saslServer == null) {
- throw new HiveSQLException(
- "Delegation token only supported over kerberos authentication");
+ throw new HiveSQLException("Delegation token only supported over kerberos authentication");
}
try {
saslServer.renewDelegationToken(delegationToken);
@@ -287,21 +271,21 @@ public void renewDelegationToken(String delegationToken) throws HiveSQLException
public String getUserFromToken(String delegationToken) throws HiveSQLException {
if (saslServer == null) {
- throw new HiveSQLException(
- "Delegation token only supported over kerberos authentication");
+ throw new HiveSQLException("Delegation token only supported over kerberos authentication");
}
try {
return saslServer.getUserFromToken(delegationToken);
} catch (IOException e) {
- throw new HiveSQLException("Error extracting user from delegation token " + delegationToken, e);
+ throw new HiveSQLException("Error extracting user from delegation token " + delegationToken,
+ e);
}
}
public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
- HiveConf hiveConf) throws HiveSQLException {
- UserGroupInformation sessionUgi;
+ HiveConf hiveConf) throws HiveSQLException {
try {
+ UserGroupInformation sessionUgi;
if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser);
} else {
@@ -309,11 +293,11 @@ public static void verifyProxyAccess(String realUser, String proxyUser, String i
}
if (!proxyUser.equalsIgnoreCase(realUser)) {
ShimLoader.getHadoopShims().
- authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
+ authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
}
} catch (IOException e) {
- throw new HiveSQLException("Failed to validate proxy privilage of " + realUser +
- " for " + proxyUser, e);
+ throw new HiveSQLException(
+ "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, e);
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
index 91d7188..e511248 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
@@ -16,7 +16,6 @@
* limitations under the License.
*/
-
package org.apache.hive.service.auth;
import java.io.IOException;
@@ -36,24 +35,26 @@
import org.apache.thrift.transport.TTransport;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
/**
- *
- * Utility functions for http mode authentication
- *
+ * Utility functions for HTTP mode authentication.
*/
-public class HttpAuthUtils {
+public final class HttpAuthUtils {
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
public static final String AUTHORIZATION = "Authorization";
public static final String BASIC = "Basic";
public static final String NEGOTIATE = "Negotiate";
+ private HttpAuthUtils() {
+ throw new UnsupportedOperationException("Can't initialize class");
+ }
+
public static class HttpCLIServiceProcessorFactory extends TProcessorFactory {
+
private final ThriftCLIService service;
private final HiveConf hiveConf;
private final boolean isDoAsEnabled;
@@ -61,16 +62,14 @@
public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
super(null);
this.service = service;
- this.hiveConf = service.getHiveConf();
- this.isDoAsEnabled = hiveConf.getBoolVar(
- HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
+ hiveConf = service.getHiveConf();
+ isDoAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
}
@Override
public TProcessor getProcessor(TTransport trans) {
TProcessor baseProcessor = new TCLIService.Processor(service);
- return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) :
- baseProcessor;
+ return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) : baseProcessor;
}
}
@@ -79,67 +78,52 @@ public static TProcessorFactory getAuthProcFactory(ThriftCLIService service) {
}
/**
- *
* @return Stringified Base64 encoded kerberosAuthHeader on success
- * @throws GSSException
- * @throws IOException
- * @throws InterruptedException
*/
- public static String getKerberosServiceTicket(String principal,
- String host, String serverHttpUrl)
- throws GSSException, IOException, InterruptedException {
+ public static String getKerberosServiceTicket(String principal, String host, String serverHttpUrl)
+ throws IOException, InterruptedException {
UserGroupInformation clientUGI = getClientUGI("kerberos");
String serverPrincipal = getServerPrincipal(principal, host);
// Uses the Ticket Granting Ticket in the UserGroupInformation
- return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal,
- clientUGI.getShortUserName(), serverHttpUrl));
+ return clientUGI.doAs(
+ new HttpKerberosClientAction(serverPrincipal, clientUGI.getShortUserName(), serverHttpUrl));
}
/**
- * Get server pricipal and verify that hostname is present
- * @return
- * @throws IOException
+ * Get server principal and verify that hostname is present.
*/
- private static String getServerPrincipal(String principal, String host)
- throws IOException {
- return ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(
- principal, host);
+ private static String getServerPrincipal(String principal, String host) throws IOException {
+ return ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(principal, host);
}
/**
* JAAS login to setup the client UserGroupInformation.
- * Sets up the kerberos Ticket Granting Ticket,
- * in the client UserGroupInformation object
+ * Sets up the Kerberos Ticket Granting Ticket,
+ * in the client UserGroupInformation object.
+ *
* @return Client's UserGroupInformation
- * @throws IOException
*/
- public static UserGroupInformation getClientUGI(String authType)
- throws IOException {
+ public static UserGroupInformation getClientUGI(String authType) throws IOException {
return ShimLoader.getHadoopThriftAuthBridge().getCurrentUGIWithConf(authType);
}
- /**
- *
- * HttpKerberosClientAction
- *
- */
- public static class HttpKerberosClientAction implements
- PrivilegedExceptionAction<String> {
- String serverPrincipal;
- String clientUserName;
- String serverHttpUrl;
+ public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
+
+ private final String serverPrincipal;
+ private final String clientUserName;
+ private final String serverHttpUrl;
private final Base64 base64codec;
public static final String HTTP_RESPONSE = "HTTP_RESPONSE";
public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
private final HttpContext httpContext;
- public HttpKerberosClientAction(String serverPrincipal,
- String clientUserName, String serverHttpUrl) {
+ public HttpKerberosClientAction(String serverPrincipal, String clientUserName,
+ String serverHttpUrl) {
this.serverPrincipal = serverPrincipal;
this.clientUserName = clientUserName;
this.serverHttpUrl = serverHttpUrl;
- this.base64codec = new Base64(0);
- this.httpContext = new BasicHttpContext();
+ base64codec = new Base64(0);
+ httpContext = new BasicHttpContext();
httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
}
@@ -158,9 +142,8 @@ public String run() throws Exception {
GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
// GSS credentials for client
- GSSCredential clientCreds = manager.createCredential(clientName,
- GSSCredential.DEFAULT_LIFETIME, mechOid,
- GSSCredential.INITIATE_ONLY);
+ GSSCredential clientCreds = manager.createCredential(clientName,
+ GSSCredential.DEFAULT_LIFETIME, mechOid, GSSCredential.INITIATE_ONLY);
/*
* Create a GSSContext for mutual authentication with the
@@ -170,22 +153,20 @@ public String run() throws Exception {
* use. The client chooses the mechanism to use.
* - clientCreds are the client credentials
*/
- GSSContext gssContext = manager.createContext(serverName,
- mechOid, clientCreds, GSSContext.DEFAULT_LIFETIME);
+ GSSContext gssContext =
+ manager.createContext(serverName, mechOid, clientCreds, GSSContext.DEFAULT_LIFETIME);
// Mutual authentication not r
gssContext.requestMutualAuth(false);
- // Estabilish context
+ // Establish context
byte[] inToken = new byte[0];
- byte[] outToken;
- outToken = gssContext.initSecContext(inToken, 0, inToken.length);
+ byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
gssContext.dispose();
// Base64 encoded and stringified token for server
- String authHeaderBase64String = new String(base64codec.encode(outToken));
- return authHeaderBase64String;
+ return new String(base64codec.encode(outToken));
}
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java b/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
index bf6be47..587372e 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
@@ -14,8 +14,9 @@
package org.apache.hive.service.auth;
-public class HttpAuthenticationException extends Exception{
- static final long serialVersionUID = 0;
+public class HttpAuthenticationException extends Exception {
+
+ private static final long serialVersionUID = 0;
/**
* @param cause original exception.
@@ -32,7 +33,7 @@ public HttpAuthenticationException(String msg) {
}
/**
- * @param msg exception message.
+ * @param msg exception message.
* @param cause original exception.
*/
public HttpAuthenticationException(String msg, Throwable cause) {
diff --git a/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java b/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java
index 8368938..245d793 100644
--- a/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java
+++ b/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java
@@ -31,14 +31,11 @@
import org.apache.thrift.protocol.TProtocol;
/**
- *
- * Wraps the underlying thrift processor's process call,
+ * Wraps the underlying Thrift processor's process call,
* to assume the client user's UGI/Subject for the doAs calls.
- * Gets the client's username from a threadlocal in SessionManager which is
+ * Gets the client's username from a ThreadLocal in SessionManager which is
* set in the ThriftHttpServlet, and constructs a client UGI object from that.
- *
*/
-
public class HttpCLIServiceUGIProcessor implements TProcessor {
private final TProcessor underlyingProcessor;
@@ -46,18 +43,18 @@
public HttpCLIServiceUGIProcessor(TProcessor underlyingProcessor) {
this.underlyingProcessor = underlyingProcessor;
- this.shim = ShimLoader.getHadoopShims();
+ shim = ShimLoader.getHadoopShims();
}
@Override
public boolean process(final TProtocol in, final TProtocol out) throws TException {
- /**
- * Build the client UGI from threadlocal username [SessionManager.getUserName()].
- * The threadlocal username is set in the ThriftHttpServlet.
+ /*
+ * Build the client UGI from ThreadLocal username [SessionManager.getUserName()].
+ * The ThreadLocal username is set in the ThriftHttpServlet.
*/
- UserGroupInformation clientUgi = null;
try {
- clientUgi = shim.createRemoteUser(SessionManager.getUserName(), new ArrayList<String>());
+ UserGroupInformation clientUgi =
+ shim.createRemoteUser(SessionManager.getUserName(), new ArrayList<String>());
return shim.doAs(clientUgi, new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() {
@@ -68,10 +65,9 @@ public Boolean run() {
}
}
});
- }
- catch (RuntimeException rte) {
+ } catch (RuntimeException rte) {
if (rte.getCause() instanceof TException) {
- throw (TException)rte.getCause();
+ throw (TException) rte.getCause();
}
throw rte;
} catch (InterruptedException ie) {
diff --git a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index 4b70558..8b54b59 100644
--- a/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ b/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -19,7 +19,6 @@
import java.io.IOException;
import java.util.Map;
-
import javax.security.sasl.SaslException;
import org.apache.hadoop.hive.shims.ShimLoader;
@@ -33,9 +32,14 @@
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TTransport;
-public class KerberosSaslHelper {
+public final class KerberosSaslHelper {
+
+ private KerberosSaslHelper() {
+ throw new UnsupportedOperationException("Can't initialize class");
+ }
private static class CLIServiceProcessorFactory extends TProcessorFactory {
+
private final ThriftCLIService service;
private final Server saslServer;
@@ -53,17 +57,17 @@ public TProcessor getProcessor(TTransport trans) {
}
public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
- ThriftCLIService service) {
- return new CLIServiceProcessorFactory (saslServer, service);
+ ThriftCLIService service) {
+ return new CLIServiceProcessorFactory(saslServer, service);
}
public static TTransport getKerberosTransport(String principal, String host,
- final TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject) throws SaslException {
+ TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject)
+ throws SaslException {
try {
- final String names[] = principal.split("[/@]");
+ String[] names = principal.split("[/@]");
if (names.length != 3) {
- throw new IllegalArgumentException("Kerberos principal should have 3 parts: "
- + principal);
+ throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
}
if (assumeSubject) {
@@ -71,20 +75,21 @@ public static TTransport getKerberosTransport(String principal, String host,
} else {
HadoopThriftAuthBridge.Client authBridge =
ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
- return authBridge.createClientTransport(principal, host,
- "KERBEROS", null, underlyingTransport, saslProps);
+ return authBridge.createClientTransport(principal, host, "KERBEROS", null,
+ underlyingTransport, saslProps);
}
} catch (IOException e) {
throw new SaslException("Failed to open client transport", e);
}
}
- public static TTransport createSubjectAssumedTransport(String principal,
- TTransport underlyingTransport, Map saslProps) throws IOException {
- TTransport saslTransport = null;
- final String names[] = principal.split("[/@]");
+ public static TTransport createSubjectAssumedTransport(String principal,
+ TTransport underlyingTransport, Map saslProps) throws IOException {
+ String[] names = principal.split("[/@]");
try {
- saslTransport = new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null, underlyingTransport);
+ TTransport saslTransport =
+ new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null,
+ underlyingTransport);
return new TSubjectAssumingTransport(saslTransport);
} catch (SaslException se) {
throw new IOException("Could not instantiate SASL transport", se);
@@ -92,13 +97,13 @@ public static TTransport createSubjectAssumedTransport(String principal,
}
public static TTransport getTokenTransport(String tokenStr, String host,
- final TTransport underlyingTransport, Map saslProps) throws SaslException {
+ TTransport underlyingTransport, Map saslProps) throws SaslException {
HadoopThriftAuthBridge.Client authBridge =
ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
try {
- return authBridge.createClientTransport(null, host,
- "DIGEST", tokenStr, underlyingTransport, saslProps);
+ return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
+ saslProps);
} catch (IOException e) {
throw new SaslException("Failed to open client transport", e);
}
diff --git a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
index 5342214..2a10927 100644
--- a/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
+++ b/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
@@ -18,31 +18,28 @@
package org.apache.hive.service.auth;
import java.util.Hashtable;
-
import javax.naming.Context;
import javax.naming.NamingException;
-import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.security.sasl.AuthenticationException;
import org.apache.hadoop.hive.conf.HiveConf;
-public class LdapAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+public class LdapAuthenticationProviderImpl implements PasswordAuthenticationProvider {
private final String ldapURL;
private final String baseDN;
private final String ldapDomain;
- LdapAuthenticationProviderImpl () {
+ LdapAuthenticationProviderImpl() {
HiveConf conf = new HiveConf();
- this.ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
- this.baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
- this.ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
+ ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
+ baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
+ ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
}
@Override
- public void Authenticate(String user, String password)
- throws AuthenticationException {
+ public void authenticate(String user, String password) throws AuthenticationException {
Hashtable env = new Hashtable();
env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
@@ -51,15 +48,15 @@ public void Authenticate(String user, String password)
// If the domain is supplied, then append it. LDAP providers like Active Directory
// use a fully qualified user name like foo@bar.com.
if (ldapDomain != null) {
- user = user + "@" + ldapDomain;
+ user = user + "@" + ldapDomain;
}
// setup the security principal
String bindDN;
- if (baseDN != null) {
- bindDN = "uid=" + user + "," + baseDN;
- } else {
+ if (baseDN == null) {
bindDN = user;
+ } else {
+ bindDN = "uid=" + user + "," + baseDN;
}
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, bindDN);
@@ -67,12 +64,11 @@ public void Authenticate(String user, String password)
try {
// Create initial context
- DirContext ctx = new InitialDirContext(env);
+ Context ctx = new InitialDirContext(env);
ctx.close();
} catch (NamingException e) {
throw new AuthenticationException("Error validating LDAP user", e);
}
- return;
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java b/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
index 5e48d13..79bf50e 100644
--- a/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
+++ b/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
@@ -20,33 +20,32 @@
import javax.security.sasl.AuthenticationException;
import net.sf.jpam.Pam;
-
import org.apache.hadoop.hive.conf.HiveConf;
-public class PamAuthenticationProviderImpl implements PasswdAuthenticationProvider {
+public class PamAuthenticationProviderImpl implements PasswordAuthenticationProvider {
private final String pamServiceNames;
- PamAuthenticationProviderImpl () {
+ PamAuthenticationProviderImpl() {
HiveConf conf = new HiveConf();
- this.pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
+ pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
}
@Override
- public void Authenticate(String user, String password)
- throws AuthenticationException {
+ public void authenticate(String user, String password) throws AuthenticationException {
if (pamServiceNames == null || pamServiceNames.trim().isEmpty()) {
throw new AuthenticationException("No PAM services are set.");
}
- String pamServices[] = pamServiceNames.split(",");
+ String[] pamServices = pamServiceNames.split(",");
for (String pamService : pamServices) {
Pam pam = new Pam(pamService);
boolean isAuthenticated = pam.authenticateSuccessful(user, password);
if (!isAuthenticated) {
- throw new AuthenticationException("Error authenticating with the PAM service: " + pamService);
+ throw new AuthenticationException(
+ "Error authenticating with the PAM service: " + pamService);
}
}
}
-}
\ No newline at end of file
+}
diff --git a/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java b/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
deleted file mode 100644
index 2d0da3a..0000000
--- a/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hive.service.auth;
-
-import javax.security.sasl.AuthenticationException;
-
-public interface PasswdAuthenticationProvider {
- /**
- * The Authenticate method is called by the HiveServer2 authentication layer
- * to authenticate users for their requests.
- * If a user is to be granted, return nothing/throw nothing.
- * When a user is to be disallowed, throw an appropriate {@link AuthenticationException}.
- *
- * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
- *
- * @param user - The username received over the connection request
- * @param password - The password received over the connection request
- * @throws AuthenticationException - When a user is found to be
- * invalid by the implementation
- */
- void Authenticate(String user, String password) throws AuthenticationException;
-}
diff --git a/service/src/java/org/apache/hive/service/auth/PasswordAuthenticationProvider.java b/service/src/java/org/apache/hive/service/auth/PasswordAuthenticationProvider.java
new file mode 100644
index 0000000..34d3e67
--- /dev/null
+++ b/service/src/java/org/apache/hive/service/auth/PasswordAuthenticationProvider.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.service.auth;
+
+import javax.security.sasl.AuthenticationException;
+
+public interface PasswordAuthenticationProvider {
+
+ /**
+ * The authenticate method is called by the HiveServer2 authentication layer
+ * to authenticate users for their requests.
+ * If a user is to be granted, return nothing/throw nothing.
+ * When a user is to be disallowed, throw an appropriate {@link AuthenticationException}.
+ *
+ * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
+ *
+ * @param user The username received over the connection request
+ * @param password The password received over the connection request
+ *
+ * @throws AuthenticationException When a user is found to be
+ * invalid by the implementation
+ */
+ void authenticate(String user, String password) throws AuthenticationException;
+}
diff --git a/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java b/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
index dd788c6..ea016ed 100644
--- a/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
+++ b/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
@@ -18,8 +18,8 @@
package org.apache.hive.service.auth;
import java.io.IOException;
+import java.security.Security;
import java.util.HashMap;
-
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
@@ -30,10 +30,8 @@
import javax.security.sasl.AuthorizeCallback;
import javax.security.sasl.SaslException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
-import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
import org.apache.hive.service.cli.thrift.TCLIService.Iface;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessor;
@@ -43,78 +41,81 @@
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportFactory;
-public class PlainSaslHelper {
+public final class PlainSaslHelper {
+
+ private PlainSaslHelper() {
+ throw new UnsupportedOperationException("Can't initialize class");
+ }
+
+ private static final class PlainServerCallbackHandler implements CallbackHandler {
- private static class PlainServerCallbackHandler implements CallbackHandler {
private final AuthMethods authMethod;
- public PlainServerCallbackHandler(String authMethodStr) throws AuthenticationException {
+
+ PlainServerCallbackHandler(String authMethodStr) throws AuthenticationException {
authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
}
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
- String userName = null;
- String passWord = null;
+ String username = null;
+ String password = null;
AuthorizeCallback ac = null;
- for (int i = 0; i < callbacks.length; i++) {
- if (callbacks[i] instanceof NameCallback) {
- NameCallback nc = (NameCallback)callbacks[i];
- userName = nc.getName();
- } else if (callbacks[i] instanceof PasswordCallback) {
- PasswordCallback pc = (PasswordCallback)callbacks[i];
- passWord = new String(pc.getPassword());
- } else if (callbacks[i] instanceof AuthorizeCallback) {
- ac = (AuthorizeCallback) callbacks[i];
+ for (Callback callback : callbacks) {
+ if (callback instanceof NameCallback) {
+ NameCallback nc = (NameCallback) callback;
+ username = nc.getName();
+ } else if (callback instanceof PasswordCallback) {
+ PasswordCallback pc = (PasswordCallback) callback;
+ password = new String(pc.getPassword());
+ } else if (callback instanceof AuthorizeCallback) {
+ ac = (AuthorizeCallback) callback;
} else {
- throw new UnsupportedCallbackException(callbacks[i]);
+ throw new UnsupportedCallbackException(callback);
}
}
- PasswdAuthenticationProvider provider =
- AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
- provider.Authenticate(userName, passWord);
+ PasswordAuthenticationProvider provider =
+ AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
+ provider.authenticate(username, password);
if (ac != null) {
ac.setAuthorized(true);
}
}
}
- public static class PlainClientbackHandler implements CallbackHandler {
+ public static class PlainCallbackHandler implements CallbackHandler {
- private final String userName;
- private final String passWord;
+ private final String username;
+ private final String password;
- public PlainClientbackHandler (String userName, String passWord) {
- this.userName = userName;
- this.passWord = passWord;
+ public PlainCallbackHandler(String username, String password) {
+ this.username = username;
+ this.password = password;
}
@Override
- public void handle(Callback[] callbacks)
- throws IOException, UnsupportedCallbackException {
- AuthorizeCallback ac = null;
- for (int i = 0; i < callbacks.length; i++) {
- if (callbacks[i] instanceof NameCallback) {
- NameCallback nameCallback = (NameCallback)callbacks[i];
- nameCallback.setName(userName);
- } else if (callbacks[i] instanceof PasswordCallback) {
- PasswordCallback passCallback = (PasswordCallback) callbacks[i];
- passCallback.setPassword(passWord.toCharArray());
+ public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+ for (Callback callback : callbacks) {
+ if (callback instanceof NameCallback) {
+ NameCallback nameCallback = (NameCallback) callback;
+ nameCallback.setName(username);
+ } else if (callback instanceof PasswordCallback) {
+ PasswordCallback passCallback = (PasswordCallback) callback;
+ passCallback.setPassword(password.toCharArray());
} else {
- throw new UnsupportedCallbackException(callbacks[i]);
+ throw new UnsupportedCallbackException(callback);
}
}
}
}
- private static class SQLPlainProcessorFactory extends TProcessorFactory {
+ private static final class SQLPlainProcessorFactory extends TProcessorFactory {
+
private final ThriftCLIService service;
- private final HiveConf conf;
- public SQLPlainProcessorFactory(ThriftCLIService service) {
+ SQLPlainProcessorFactory(ThriftCLIService service) {
super(null);
this.service = service;
- this.conf = service.getHiveConf();
}
@Override
@@ -129,27 +130,25 @@ public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService servic
// Register Plain SASL server provider
static {
- java.security.Security.addProvider(new SaslPlainProvider());
+ Security.addProvider(new SaslPlainProvider());
}
public static TTransportFactory getPlainTransportFactory(String authTypeStr)
- throws LoginException {
+ throws LoginException {
TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
try {
- saslFactory.addServerDefinition("PLAIN",
- authTypeStr, null, new HashMap(),
- new PlainServerCallbackHandler(authTypeStr));
+ saslFactory.addServerDefinition("PLAIN", authTypeStr, null, new HashMap(),
+ new PlainServerCallbackHandler(authTypeStr));
} catch (AuthenticationException e) {
- throw new LoginException ("Error setting callback handler" + e);
+ throw new LoginException("Error setting callback handler" + e);
}
return saslFactory;
}
- public static TTransport getPlainTransport(String userName, String passwd,
- final TTransport underlyingTransport) throws SaslException {
- return new TSaslClientTransport("PLAIN", null,
- null, null, new HashMap(),
- new PlainClientbackHandler(userName, passwd), underlyingTransport);
+ public static TTransport getPlainTransport(String username, String password,
+ TTransport underlyingTransport) throws SaslException {
+ return new TSaslClientTransport("PLAIN", null, null, null, new HashMap(),
+ new PlainCallbackHandler(username, password), underlyingTransport);
}
}
diff --git a/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java b/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
index 79c44e6..cd675da 100644
--- a/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
+++ b/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
@@ -18,10 +18,10 @@
package org.apache.hive.service.auth;
import java.io.IOException;
+import java.security.Provider;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Map;
-
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
@@ -35,27 +35,26 @@
import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
/**
- *
- * PlainSaslServer.
- * Sun JDK only provides PLAIN client and not server. This class implements the Plain SASL server
- * conforming to RFC #4616 (http://www.ietf.org/rfc/rfc4616.txt)
+ * Sun JDK only provides a PLAIN client and no server. This class implements the Plain SASL server
+ * conforming to RFC #4616 (http://www.ietf.org/rfc/rfc4616.txt).
*/
-public class PlainSaslServer implements SaslServer {
- private final AuthMethods authMethod;
+public class PlainSaslServer implements SaslServer {
+
+ public static final String PLAIN_METHOD = "PLAIN";
private String user;
- private String passwd;
- private String authzId;
private final CallbackHandler handler;
PlainSaslServer(CallbackHandler handler, String authMethodStr) throws SaslException {
this.handler = handler;
- this.authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
+ AuthMethods.getValidAuthMethod(authMethodStr);
}
+ @Override
public String getMechanismName() {
- return "PLAIN";
+ return PLAIN_METHOD;
}
+ @Override
public byte[] evaluateResponse(byte[] response) throws SaslException {
try {
// parse the response
@@ -68,28 +67,29 @@ public String getMechanismName() {
tokenList.addLast(messageToken.toString());
messageToken = new StringBuilder();
} else {
- messageToken.append((char)b);
+ messageToken.append((char) b);
}
}
tokenList.addLast(messageToken.toString());
// validate response
- if ((tokenList.size() < 2) || (tokenList.size() > 3)) {
+ if (tokenList.size() < 2 || tokenList.size() > 3) {
throw new SaslException("Invalid message format");
}
- passwd = tokenList.removeLast();
+ String passwd = tokenList.removeLast();
user = tokenList.removeLast();
// optional authzid
- if (!tokenList.isEmpty()) {
- authzId = tokenList.removeLast();
- } else {
+ String authzId;
+ if (tokenList.isEmpty()) {
authzId = user;
+ } else {
+ authzId = tokenList.removeLast();
}
if (user == null || user.isEmpty()) {
- throw new SaslException("No user name provide");
+ throw new SaslException("No user name provided");
}
if (passwd == null || passwd.isEmpty()) {
- throw new SaslException("No password name provide");
+ throw new SaslException("No password name provided");
}
NameCallback nameCallback = new NameCallback("User");
@@ -98,7 +98,7 @@ public String getMechanismName() {
pcCallback.setPassword(passwd.toCharArray());
AuthorizeCallback acCallback = new AuthorizeCallback(user, authzId);
- Callback[] cbList = new Callback[] {nameCallback, pcCallback, acCallback};
+ Callback[] cbList = {nameCallback, pcCallback, acCallback};
handler.handle(cbList);
if (!acCallback.isAuthorized()) {
throw new SaslException("Authentication failed");
@@ -113,49 +113,62 @@ public String getMechanismName() {
return null;
}
+ @Override
public boolean isComplete() {
return user != null;
}
+ @Override
public String getAuthorizationID() {
return user;
}
+ @Override
public byte[] unwrap(byte[] incoming, int offset, int len) {
- throw new UnsupportedOperationException();
+ throw new UnsupportedOperationException();
}
+ @Override
public byte[] wrap(byte[] outgoing, int offset, int len) {
throw new UnsupportedOperationException();
}
+ @Override
public Object getNegotiatedProperty(String propName) {
return null;
}
+ @Override
public void dispose() {}
public static class SaslPlainServerFactory implements SaslServerFactory {
- public SaslServer createSaslServer(
- String mechanism, String protocol, String serverName, Map props, CallbackHandler cbh)
- {
- if ("PLAIN".equals(mechanism)) {
+ @Override
+ public SaslServer createSaslServer(String mechanism, String protocol, String serverName,
+ Map props, CallbackHandler cbh) {
+ if (PLAIN_METHOD.equals(mechanism)) {
try {
return new PlainSaslServer(cbh, protocol);
} catch (SaslException e) {
+ /* This is to fulfill the contract of the interface, which states that an exception shall
+ be thrown when a SaslServer cannot be created due to an error, but that null should be
+ returned when a server can't be created because of the parameters supplied. The only
+ thing PlainSaslServer can fail on is an unsupported authentication mechanism, which
+ is a parameter problem — that's why we return null instead of throwing the exception. */
return null;
}
}
return null;
}
+ @Override
public String[] getMechanismNames(Map props) {
- return new String[] { "PLAIN" };
+ return new String[] {PLAIN_METHOD};
}
}
- public static class SaslPlainProvider extends java.security.Provider {
+ public static class SaslPlainProvider extends Provider {
+
public SaslPlainProvider() {
super("HiveSaslPlain", 1.0, "Hive Plain SASL provider");
put("SaslServerFactory.PLAIN", SaslPlainServerFactory.class.getName());
diff --git a/service/src/java/org/apache/hive/service/auth/SaslQOP.java b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
index 0b2e7a2..479ebf3 100644
--- a/service/src/java/org/apache/hive/service/auth/SaslQOP.java
+++ b/service/src/java/org/apache/hive/service/auth/SaslQOP.java
@@ -22,7 +22,7 @@
import java.util.Map;
/**
- * Possible values of SASL quality-of-protection value.
+ * Possible values of the SASL quality-of-protection (QOP) setting.
*/
public enum SaslQOP {
AUTH("auth"), // Authentication only.
@@ -32,14 +32,15 @@
public final String saslQop;
- private static final Map strToEnum
- = new HashMap();
+ private static final Map STR_TO_ENUM = new HashMap();
+
static {
- for (SaslQOP SaslQOP : values())
- strToEnum.put(SaslQOP.toString(), SaslQOP);
+ for (SaslQOP saslQop : values()) {
+ STR_TO_ENUM.put(saslQop.toString(), saslQop);
+ }
}
- private SaslQOP(final String saslQop) {
+ SaslQOP(String saslQop) {
this.saslQop = saslQop;
}
@@ -48,13 +49,13 @@ public String toString() {
}
public static SaslQOP fromString(String str) {
- if(str != null) {
+ if (str != null) {
str = str.toLowerCase();
}
- SaslQOP saslQOP = strToEnum.get(str);
- if(saslQOP == null) {
- throw new IllegalArgumentException("Unknown auth type: " + str + " Allowed values are: "
- + strToEnum.keySet());
+ SaslQOP saslQOP = STR_TO_ENUM.get(str);
+ if (saslQOP == null) {
+ throw new IllegalArgumentException(
+ "Unknown auth type: " + str + " Allowed values are: " + STR_TO_ENUM.keySet());
}
return saslQOP;
}
diff --git a/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java b/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
index 3be4b4b..19722f2 100644
--- a/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
+++ b/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
@@ -34,10 +34,12 @@
* This class is responsible for setting the ipAddress for operations executed via HiveServer2.
*
*
- * - Ipaddress is only set for operations that calls listeners with hookContext @see ExecuteWithHookContext.
- * - Ipaddress is only set if the underlying transport mechanism is socket.
+ * - IP address is only set for operations that call listeners with hookContext
+ * - IP address is only set if the underlying transport mechanism is a socket
*
*
+ *
+ * @see org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext
*/
public class TSetIpAddressProcessor extends TCLIService.Processor {
@@ -54,26 +56,26 @@ public boolean process(final TProtocol in, final TProtocol out) throws TExceptio
try {
return super.process(in, out);
} finally {
- threadLocalUserName.remove();
- threadLocalIpAddress.remove();
+ THREAD_LOCAL_USER_NAME.remove();
+ THREAD_LOCAL_IP_ADDRESS.remove();
}
}
private void setUserName(final TProtocol in) {
TTransport transport = in.getTransport();
if (transport instanceof TSaslServerTransport) {
- String userName = ((TSaslServerTransport)transport).getSaslServer().getAuthorizationID();
- threadLocalUserName.set(userName);
+ String userName = ((TSaslServerTransport) transport).getSaslServer().getAuthorizationID();
+ THREAD_LOCAL_USER_NAME.set(userName);
}
}
protected void setIpAddress(final TProtocol in) {
TTransport transport = in.getTransport();
TSocket tSocket = getUnderlyingSocketFromTransport(transport);
- if (tSocket != null) {
- threadLocalIpAddress.set(tSocket.getSocket().getInetAddress().toString());
- } else {
+ if (tSocket == null) {
LOGGER.warn("Unknown Transport, cannot determine ipAddress");
+ } else {
+ THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().toString());
}
}
@@ -92,14 +94,14 @@ private TSocket getUnderlyingSocketFromTransport(TTransport transport) {
return null;
}
- private static ThreadLocal threadLocalIpAddress = new ThreadLocal() {
+ private static final ThreadLocal THREAD_LOCAL_IP_ADDRESS = new ThreadLocal() {
@Override
protected synchronized String initialValue() {
return null;
}
};
- private static ThreadLocal threadLocalUserName = new ThreadLocal(){
+ private static final ThreadLocal THREAD_LOCAL_USER_NAME = new ThreadLocal() {
@Override
protected synchronized String initialValue() {
return null;
@@ -107,10 +109,10 @@ protected synchronized String initialValue() {
};
public static String getUserIpAddress() {
- return threadLocalIpAddress.get();
+ return THREAD_LOCAL_IP_ADDRESS.get();
}
public static String getUserName() {
- return threadLocalUserName.get();
+ return THREAD_LOCAL_USER_NAME.get();
}
-}
\ No newline at end of file
+}
diff --git a/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java b/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
index d0468b3..2422e86 100644
--- a/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
+++ b/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
@@ -22,7 +22,6 @@
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
-
import javax.security.auth.Subject;
import org.apache.hadoop.hive.thrift.TFilterTransport;
@@ -30,43 +29,42 @@
import org.apache.thrift.transport.TTransportException;
/**
- *
- * This is used on the client side, where the API explicitly opens a transport to
- * the server using the Subject.doAs()
- */
- public class TSubjectAssumingTransport extends TFilterTransport {
+ * This is used on the client side, where the API explicitly opens a transport to
+ * the server using the Subject.doAs().
+ */
+public class TSubjectAssumingTransport extends TFilterTransport {
- public TSubjectAssumingTransport(TTransport wrapped) {
- super(wrapped);
- }
+ public TSubjectAssumingTransport(TTransport wrapped) {
+ super(wrapped);
+ }
- @Override
- public void open() throws TTransportException {
- try {
- AccessControlContext context = AccessController.getContext();
- Subject subject = Subject.getSubject(context);
- Subject.doAs(subject, new PrivilegedExceptionAction() {
- public Void run() {
- try {
- wrapped.open();
- } catch (TTransportException tte) {
- // Wrap the transport exception in an RTE, since Subject.doAs() then goes
- // and unwraps this for us out of the doAs block. We then unwrap one
- // more time in our catch clause to get back the TTE. (ugh)
- throw new RuntimeException(tte);
- }
- return null;
- }
- });
- } catch (PrivilegedActionException ioe) {
- throw new RuntimeException("Received an ioe we never threw!", ioe);
- } catch (RuntimeException rte) {
- if (rte.getCause() instanceof TTransportException) {
- throw (TTransportException)rte.getCause();
- } else {
- throw rte;
- }
- }
- }
+ @Override
+ public void open() throws TTransportException {
+ try {
+ AccessControlContext context = AccessController.getContext();
+ Subject subject = Subject.getSubject(context);
+ Subject.doAs(subject, new PrivilegedExceptionAction() {
+ public Void run() {
+ try {
+ wrapped.open();
+ } catch (TTransportException tte) {
+ // Wrap the transport exception in an RTE, since Subject.doAs() then goes
+ // and unwraps this for us out of the doAs block. We then unwrap one
+ // more time in our catch clause to get back the TTE. (ugh)
+ throw new RuntimeException(tte);
+ }
+ return null;
+ }
+ });
+ } catch (PrivilegedActionException ioe) {
+ throw new RuntimeException("Received an ioe we never threw!", ioe);
+ } catch (RuntimeException rte) {
+ if (rte.getCause() instanceof TTransportException) {
+ throw (TTransportException) rte.getCause();
+ } else {
+ throw rte;
+ }
+ }
+ }
- }
+}
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index 99ef8bc..104c366 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -37,7 +37,7 @@
import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.auth.HttpAuthUtils;
import org.apache.hive.service.auth.HttpAuthenticationException;
-import org.apache.hive.service.auth.PasswdAuthenticationProvider;
+import org.apache.hive.service.auth.PasswordAuthenticationProvider;
import org.apache.hive.service.cli.session.SessionManager;
import org.apache.thrift.TProcessor;
import org.apache.thrift.protocol.TProtocolFactory;
@@ -128,9 +128,9 @@ private String doPasswdAuth(HttpServletRequest request, String authType)
if (!authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.NOSASL.toString())) {
try {
AuthMethods authMethod = AuthMethods.getValidAuthMethod(authType);
- PasswdAuthenticationProvider provider =
+ PasswordAuthenticationProvider provider =
AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
- provider.Authenticate(userName, getPassword(request, authType));
+ provider.authenticate(userName, getPassword(request, authType));
} catch (Exception e) {
throw new HttpAuthenticationException(e);