Index: service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java =================================================================== --- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (revision 1669410) +++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (working copy) @@ -17,6 +17,8 @@ */ package org.apache.hive.service.auth; +import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION; + import java.io.IOException; import java.net.InetSocketAddress; import java.net.UnknownHostException; @@ -28,8 +30,10 @@ import javax.net.ssl.SSLServerSocket; import javax.security.auth.login.LoginException; +import javax.security.sasl.AuthenticationException; import javax.security.sasl.Sasl; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStore; @@ -46,6 +50,7 @@ import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.thrift.TProcessorFactory; import org.apache.thrift.transport.TSSLTransportFactory; +import org.apache.thrift.transport.TSaslServerTransport; import org.apache.thrift.transport.TServerSocket; import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; @@ -86,6 +91,7 @@ private String authTypeStr; private final String transportMode; private final HiveConf conf; + private String hadoopAuth; public static final String HS2_PROXY_USER = "hive.server2.proxy.user"; public static final String HS2_CLIENT_TOKEN = "hiveserver2ClientToken"; @@ -95,6 +101,11 @@ transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE); authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION); + // ShimLoader.getHadoopShims().isSecurityEnabled() will only check that + // hadoopAuth is not simple; it does not guarantee it is kerberos + Configuration hconf = new Configuration(); + hadoopAuth = 
hconf.get(HADOOP_SECURITY_AUTHENTICATION, "simple"); + // In http mode we use NOSASL as the default auth type if ("http".equalsIgnoreCase(transportMode)) { if (authTypeStr == null) { @@ -104,7 +115,8 @@ if (authTypeStr == null) { authTypeStr = AuthTypes.NONE.getAuthName(); } - if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) { + if (hadoopAuth.equalsIgnoreCase("kerberos") && !authTypeStr.equalsIgnoreCase( + AuthTypes.NOSASL.getAuthName())) { saslServer = ShimLoader.getHadoopThriftAuthBridge() .createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB), conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)); @@ -131,22 +143,40 @@ public TTransportFactory getAuthTransFactory() throws LoginException { TTransportFactory transportFactory; - if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) { + TSaslServerTransport.Factory serverTransportFactory; + + if (hadoopAuth.equalsIgnoreCase("kerberos") && !authTypeStr.equalsIgnoreCase( + AuthTypes.NOSASL.getAuthName())) { try { - transportFactory = saslServer.createTransportFactory(getSaslProperties()); + serverTransportFactory = saslServer.createSaslServerTransportFactory( + getSaslProperties()); } catch (TTransportException e) { throw new LoginException(e.getMessage()); } - } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName())) { - transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr); - } else if (authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName())) { - transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr); - } else if (authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName())) { - transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr); + if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) { + // no-op + } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName()) || + authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName()) || + 
authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName()) || + authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) { + try { + serverTransportFactory.addServerDefinition("PLAIN", + authTypeStr, null, new HashMap(), + new PlainSaslHelper.PlainServerCallbackHandler(authTypeStr)); + } catch (AuthenticationException e) { + throw new LoginException ("Error setting callback handler" + e); + } + } else { + throw new LoginException("Unsupported authentication type " + authTypeStr); + } + transportFactory = saslServer.wrapTransportFactory(serverTransportFactory); + } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NONE.getAuthName()) || + authTypeStr.equalsIgnoreCase(AuthTypes.LDAP.getAuthName()) || + authTypeStr.equalsIgnoreCase(AuthTypes.PAM.getAuthName()) || + authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) { + transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr); } else if (authTypeStr.equalsIgnoreCase(AuthTypes.NOSASL.getAuthName())) { transportFactory = new TTransportFactory(); - } else if (authTypeStr.equalsIgnoreCase(AuthTypes.CUSTOM.getAuthName())) { - transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr); } else { throw new LoginException("Unsupported authentication type " + authTypeStr); } Index: service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java =================================================================== --- service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java (revision 1669410) +++ service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java (working copy) @@ -74,7 +74,7 @@ throw new UnsupportedOperationException("Can't initialize class"); } - private static final class PlainServerCallbackHandler implements CallbackHandler { + public static final class PlainServerCallbackHandler implements CallbackHandler { private final AuthMethods authMethod; Index: shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java 
=================================================================== --- shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java (revision 1669410) +++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java (working copy) @@ -370,6 +370,20 @@ public TTransportFactory createTransportFactory(Map saslProps) throws TTransportException { + + TSaslServerTransport.Factory transFactory = createSaslServerTransportFactory(saslProps); + + return new TUGIAssumingTransportFactory(transFactory, realUgi); + } + + /** + * Create a TSaslServerTransport.Factory that, upon connection of a client + * socket, negotiates a Kerberized SASL transport. + * + * @param saslProps Map of SASL properties + */ + public TSaslServerTransport.Factory createSaslServerTransportFactory( + Map saslProps) throws TTransportException { // Parse out the kerberos principal, host, realm. String kerberosName = realUgi.getUserName(); final String names[] = SaslRpcServer.splitKerberosName(kerberosName); @@ -387,6 +401,15 @@ null, SaslRpcServer.SASL_DEFAULT_REALM, saslProps, new SaslDigestCallbackHandler(secretManager)); + return transFactory; + } + + /** + * Wrap a TTransportFactory in such a way that, before processing any RPC, it + * assumes the UserGroupInformation of the user authenticated by + * the SASL transport. + */ + public TTransportFactory wrapTransportFactory(TTransportFactory transFactory) { return new TUGIAssumingTransportFactory(transFactory, realUgi); }