diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 6d0cf15..a57516d 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1992,6 +1992,28 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
         "hive.security.authenticator.manager,hive.security.authorization.manager,hive.users.in.admin.role",
         "Comma separated list of configuration options which are immutable at runtime"),
 
+    HIVE_SERVER2_KERBEROS_USE_SSL("hive.server2.kerberos.use.SSL", false,
+        "Set this to true to use SSL encryption for SASL(PLAIN) with Kerberos in HiveServer2."),
+    HIVE_SERVER2_KERBEROS_SSL_PORT("hive.server2.kerberos.ssl.port", 10010,
+        "Port number on which HiveServer2 listens for SSL-encrypted SASL(PLAIN) with Kerberos."),
+    HIVE_SERVER2_KERBEROS_SSL_KEYSTORE_PATH("hive.server2.kerberos.ssl.keystore.path", "",
+        "SSL certificate keystore location for SSL-encrypted SASL(PLAIN) with Kerberos."),
+    HIVE_SERVER2_KERBEROS_SSL_KEYSTORE_PASSWORD("hive.server2.kerberos.ssl.keystore.password", "",
+        "SSL certificate keystore password for SSL-encrypted SASL(PLAIN) with Kerberos."),
+    HIVE_SERVER2_KERBEROS_SSL_MIN_WORKER_THREADS("hive.server2.kerberos.ssl.min.worker.threads", 5,
+        "Minimum number of SASL(PLAIN) worker threads with Kerberos"),
+    HIVE_SERVER2_KERBEROS_SSL_MAX_WORKER_THREADS("hive.server2.kerberos.ssl.max.worker.threads", 500,
+        "Maximum number of SASL(PLAIN) worker threads with Kerberos"),
+    HIVE_SERVER2_KERBEROS_SSL_CUSTOM_AUTHENTICATION_CLASS("hive.server2.kerberos.ssl.custom.authentication.class", null,
+        "Custom authentication class. Used when property\n" +
+        "'hive.server2.authentication' is set to 'KERBEROS' and\n" +
+        "'hive.server2.kerberos.use.SSL' is set to 'true'. Provided class\n" +
+        "must be a proper implementation of the interface\n" +
+        "org.apache.hadoop.hive.thrift.KrbCustomAuthenticationProvider. HiveServer2\n" +
+        "will call its Authenticate(user, password) method to authenticate requests.\n" +
+        "The implementation may optionally implement Hadoop's\n" +
+        "org.apache.hadoop.conf.Configurable class to grab Hive's Configuration object."),
+
     // If this is set all move tasks at the end of a multi-insert query will only begin once all
     // outputs are ready
     HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES(
diff --git itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbWithCustomAuthOverSSL.java itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbWithCustomAuthOverSSL.java
new file mode 100644
index 0000000..0087145
--- /dev/null
+++ itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbWithCustomAuthOverSSL.java
@@ -0,0 +1,157 @@
+package org.apache.hive.minikdc;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.security.sasl.AuthenticationException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.thrift.KrbCustomAuthenticationProvider;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestJdbWithCustomAuthOverSSL {
+  private static final Logger LOG = LoggerFactory.getLogger(TestJdbWithCustomAuthOverSSL.class);
+  private static final String KEY_STORE_NAME = "keystore.jks";
+  private static final String TRUST_STORE_NAME = "truststore.jks";
+  private static final String KEY_STORE_PASSWORD = "HiveJdbc";
+  private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore";
+  private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword";
+  private static Integer KERBEROS_SSL_PORT;
+
+  private MiniHiveKdc miniHiveKdc;
+  private MiniHS2 miniHS2 = null;
+  private static HiveConf hiveConf = new HiveConf();
+  private Connection hs2Conn = null;
+  private Map<String, String> confOverlay;
+  private String dataFileDir = hiveConf.get("test.data.files");
+
+  @BeforeClass
+  public static void beforeTest() throws Exception {
+    Class.forName(MiniHS2.getJdbcDriverName());
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    DriverManager.setLoginTimeout(0);
+
+    KERBEROS_SSL_PORT = MetaStoreUtils.findFreePort();
+    confOverlay = new HashMap<String, String>();
+    setCustomAuthOverSslConfWithKrbOverlay(confOverlay);
+
+    miniHiveKdc = MiniHiveKdc.getMiniHiveKdc(hiveConf);
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
+    miniHS2.start(confOverlay);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (hs2Conn != null) {
+      try {
+        hs2Conn.close();
+      } catch (Exception e) {
+        // Ignore shutdown errors since there are negative tests
+      }
+    }
+    if (miniHS2 != null && miniHS2.isStarted()) {
+      miniHS2.stop();
+    }
+    System.clearProperty(JAVA_TRUST_STORE_PROP);
+    System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
+  }
+
+  @Test
+  public void testKerberosAuthentication() throws Exception {
+    // JDBC connection to HiveServer2 with Kerberos
+    hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
+    hs2Conn.close();
+  }
+
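+  // What the next test exercises: the SASL(PLAIN)-over-SSL listener runs on
+  // hive.server2.kerberos.ssl.port, separate from the main Kerberos (GSSAPI) port,
+  // so a password-based client connects with a plain user/password JDBC URL plus
+  // the usual SSL trust-store parameters, e.g. (illustrative values only):
+  //   jdbc:hive2://host:10010/default;ssl=true;sslTrustStore=/path/truststore.jks;trustStorePassword=changeit
+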
+  @Test
+  public void testCustomAuthenticationOverSSL() throws Exception {
+    // JDBC connection with ID/PASSWD over SSL with Kerberos (custom class)
+    String url = "jdbc:hive2://" + miniHS2.getHost() + ":" + KERBEROS_SSL_PORT + "/default"
+        + ";ssl=true;sslTrustStore=" + dataFileDir + File.separator + TRUST_STORE_NAME
+        + ";trustStorePassword=" + KEY_STORE_PASSWORD;
+
+    // wrong ID/PASSWD
+    try {
+      hs2Conn = DriverManager.getConnection(url, "wronguser", "pwd");
+      fail("Expected authentication to fail for a wrong user/password");
+    } catch (SQLException e) {
+      assertNotNull(e.getMessage());
+      assertTrue(e.getMessage(), e.getMessage().contains("Peer indicated failure: Error validating the login"));
+    }
+
+    // correct ID/PASSWD
+    hs2Conn = DriverManager.getConnection(url, "hiveuser", "hive");
+    hs2Conn.close();
+
+    LOG.info(">>> PASSED testCustomAuthenticationOverSSL");
+  }
+
+  public static class SimpleCustomAuthWithKerberosProviderImpl implements KrbCustomAuthenticationProvider {
+
+    private Map<String, String> userMap = new HashMap<String, String>();
+
+    public SimpleCustomAuthWithKerberosProviderImpl() {
+      init();
+    }
+
+    private void init() {
+      userMap.put("hiveuser", "hive");
+    }
+
+    @Override
+    public void Authenticate(String user, String password) throws AuthenticationException {
+      if (!userMap.containsKey(user)) {
+        throw new AuthenticationException("Invalid user : " + user);
+      }
+      if (!userMap.get(user).equals(password)) {
+        throw new AuthenticationException("Invalid passwd : " + password);
+      }
+    }
+  }
+
+  private void setCustomAuthOverSslConfWithKrbOverlay(Map<String, String> confOverlay) {
+    // Custom class over SSL with Kerberos
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_USE_SSL.varname, "true");
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_SSL_PORT.varname, KERBEROS_SSL_PORT.toString());
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_SSL_KEYSTORE_PATH.varname,
+        dataFileDir + File.separator + KEY_STORE_NAME);
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_SSL_KEYSTORE_PASSWORD.varname,
+        KEY_STORE_PASSWORD);
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_SSL_MIN_WORKER_THREADS.varname, "3");
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_SSL_MAX_WORKER_THREADS.varname, "5");
+    confOverlay.put(ConfVars.HIVE_SERVER2_KERBEROS_SSL_CUSTOM_AUTHENTICATION_CLASS.varname,
+        "org.apache.hive.minikdc.TestJdbWithCustomAuthOverSSL$SimpleCustomAuthWithKerberosProviderImpl");
+  }
+
+}
diff --git service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 1e6ac4f..c2769ed 100644
--- service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -176,6 +176,26 @@ public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws Log
     }
   }
 
+  /**
+   * Returns a transport factory for HiveServer2 that verifies the ID/PASSWORD
+   * through a custom class, over SSL with Kerberos.
+   * @return the SASL(PLAIN) transport factory
+   * @throws LoginException if the configured authentication type is not KERBEROS
+   */
+  public TTransportFactory getAuthPlainTransFactory() throws LoginException {
+    TTransportFactory transportFactory;
+    if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
+      try {
+        transportFactory = saslServer.createPlainTransportFactory(getSaslProperties());
+      } catch (TTransportException e) {
+        throw new LoginException(e.getMessage());
+      }
+    } else {
+      throw new LoginException("Unsupported authentication type " + authTypeStr);
+    }
+    return transportFactory;
+  }
+
   public String getRemoteUser() {
     return saslServer == null ? null : saslServer.getRemoteUser();
   }
diff --git service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
index 6c9efba..5b582f4 100644
--- service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
+++ service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
@@ -30,21 +30,33 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.server.HiveServer2;
 import org.apache.hive.service.server.ThreadFactoryWithGarbageCleanup;
 import org.apache.thrift.TProcessorFactory;
 import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.server.TServer;
 import org.apache.thrift.server.TThreadPoolServer;
 import org.apache.thrift.transport.TServerSocket;
 import org.apache.thrift.transport.TTransportFactory;
 
 public class ThriftBinaryCLIService extends ThriftCLIService {
+  private volatile TServer sslWithKrbServer;
 
   public ThriftBinaryCLIService(CLIService cliService) {
     super(cliService, ThriftBinaryCLIService.class.getSimpleName());
   }
 
   @Override
+  public synchronized void stop() {
+    if (sslWithKrbServer != null) {
+      sslWithKrbServer.stop();
+      LOG.info("Thrift SASL(PLAIN) over SSL with Kerberos server has stopped");
+    }
+    super.stop();
+  }
+
+  @Override
   public void run() {
     try {
       // Server thread pool
@@ -96,6 +108,29 @@ public void run() {
       String msg = "Starting " + ThriftBinaryCLIService.class.getSimpleName() + " on port "
          + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
       LOG.info(msg);
+
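+      // Design note: the main binary listener above keeps GSSAPI on
+      // hive.server2.thrift.port; the block below starts a second, independent
+      // TThreadPoolServer that accepts only SASL PLAIN over SSL on
+      // hive.server2.kerberos.ssl.port, so passwords for the custom
+      // authenticator never cross the wire unencrypted.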
+      // Start a separate thread for the SASL(PLAIN) over SSL with Kerberos listener (custom class)
+      final ThriftCLIService svc = this;
+
+      if (!HiveServer2.isHTTPTransportMode(hiveConf)
+          && isKerberosAuthMode()
+          && hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_KERBEROS_USE_SSL)) {
+        Thread sslKrbThread = new Thread() {
+          @Override
+          public void run() {
+            try {
+              startPlainSSLWithKerberos(hiveConf, svc, hiveHost);
+            } catch (Throwable e) {
+              LOG.error("Failure of ThriftBinaryCLIService SASL over SSL with Kerberos listening on "
+                  + hiveHost + ": " + e.getMessage(), e);
+            }
+          }
+        };
+        sslKrbThread.start();
+      }
+
       server.serve();
     } catch (Throwable t) {
       LOG.fatal(
@@ -105,4 +140,75 @@ public void run() {
     }
   }
 
+  // SASL(PLAIN) over SSL with Kerberos listener for the custom authentication class.
+  // This is an instance method so the TServer it creates is kept in sslWithKrbServer,
+  // which lets stop() shut it down.
+  private void startPlainSSLWithKerberos(
+      final HiveConf hiveConf,
+      final ThriftCLIService service,
+      final String hiveHost) throws Exception {
+
+    try {
+      int minThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_KERBEROS_SSL_MIN_WORKER_THREADS);
+      int maxThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_KERBEROS_SSL_MAX_WORKER_THREADS);
+
+      // SASL over SSL with Kerberos server thread pool
+      String threadPoolName = "HiveServer2-SSL-with-Krb-Handler-Pool";
+      ExecutorService executorService = new ThreadPoolExecutor(minThreads, maxThreads,
+          service.workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
+          new ThreadFactoryWithGarbageCleanup(threadPoolName));
+
+      // The environment variable, when present, overrides the configured port
+      int sslPortNum;
+      String portString = System.getenv("HIVE_SERVER2_KERBEROS_SSL_PORT");
+      if (portString != null) {
+        sslPortNum = Integer.parseInt(portString);
+      } else {
+        sslPortNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_KERBEROS_SSL_PORT);
+      }
+
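+      // The keystore settings below come from the dedicated
+      // hive.server2.kerberos.ssl.* properties (not hive.server2.ssl.*), but the
+      // listener honors the same protocol blacklist, hive.ssl.protocol.blacklist,
+      // as the regular SSL transport.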
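+      // PLAIN transports the password itself, so no SASL QOP properties are set
+      // here (an empty map is passed rather than saslProps); confidentiality is
+      // expected to come from the TLS socket this factory is paired with.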
+      TSaslServerTransport.Factory transFactory = new TSaslServerTransport.Factory();
+      // Add the server definition for the custom class on a kerberized cluster
+      transFactory.addServerDefinition("PLAIN",
+          "NONE", null, new HashMap<String, String>(),
+          new SaslCustomServerCallbackHandler(thriftConf));
+
+      return new TUGIAssumingTransportFactory(transFactory, realUgi);
+    }
+
+    /**
      * Wrap a TProcessor in such a way that, before processing any RPC, it
      * assumes the UserGroupInformation of the user authenticated by
      * the SASL transport.
@@ -446,6 +477,7 @@ public void startDelegationTokenSecretManager(Configuration conf, Object rawStor
           tokenMaxLifetime,
           tokenRenewInterval,
           tokenGcInterval, dts);
+      thriftConf = new Configuration(conf);
       secretManager.startThreads();
     }
 
@@ -547,6 +579,64 @@ public String getRemoteUser() {
       return remoteUser.get();
     }
 
+    /** CallbackHandler for the custom authentication class over SSL with Kerberos. */
+    // The client uses the PLAIN mechanism of the SASL API with Kerberos, so this
+    // handler follows the same format as PlainServerCallbackHandler.
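+    // For each login, PLAIN delivers a NameCallback/PasswordCallback pair; the
+    // handler delegates the check to the configured KrbCustomAuthenticationProvider
+    // and approves the AuthorizeCallback only if Authenticate() returns without throwing.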
+    static class SaslCustomServerCallbackHandler implements CallbackHandler {
+      private KrbCustomAuthenticationProvider customProvider;
+
+      public SaslCustomServerCallbackHandler(Configuration conf) {
+        // When no custom class is configured, fall back to the default provider,
+        // which rejects every request. A null conf can occur if the delegation
+        // token secret manager has not been started, so guard against it as well.
+        String customClassName = (conf == null) ? null
+            : conf.get(HIVE_SERVER2_KERBEROS_SSL_CUSTOM_AUTHENTICATION_CLASS);
+        if (customClassName == null) {
+          customClassName = "org.apache.hadoop.hive.thrift.DefaultKrbCustomAuthenticationProviderImpl";
+        }
+
+        try {
+          Class<? extends KrbCustomAuthenticationProvider> customHandlerClass =
+              Class.forName(customClassName).asSubclass(
+                  KrbCustomAuthenticationProvider.class);
+          this.customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
+        } catch (ClassNotFoundException e) {
+          LOG.error("Cannot find the custom authentication class: " + customClassName, e);
+          // Never leave customProvider null; reject all logins instead of
+          // failing later with a NullPointerException in handle().
+          this.customProvider = new DefaultKrbCustomAuthenticationProviderImpl();
+        }
+      }
+
+      @Override
+      public void handle(Callback[] callbacks) throws InvalidToken,
+          UnsupportedCallbackException {
+        String userName = null;
+        String userPassword = null;
+        AuthorizeCallback ac = null;
+
+        for (Callback callback : callbacks) {
+          if (callback instanceof AuthorizeCallback) {
+            ac = (AuthorizeCallback) callback;
+          } else if (callback instanceof NameCallback) {
+            NameCallback nc = (NameCallback) callback;
+            userName = nc.getName();
+          } else if (callback instanceof PasswordCallback) {
+            PasswordCallback pc = (PasswordCallback) callback;
+            userPassword = new String(pc.getPassword());
+          } else {
+            throw new UnsupportedCallbackException(callback,
+                "Unrecognized CUSTOM PLAIN Callback");
+          }
+        }
+
+        try {
+          // Call the custom authentication module
+          this.customProvider.Authenticate(userName, userPassword);
+        } catch (AuthenticationException e) {
+          throw new InvalidToken("ERROR: ugi=" + userName + " is not allowed");
+        }
+
+        if (ac != null) {
+          ac.setAuthorized(true);
+        }
+      }
+    }
+
     /** CallbackHandler for SASL DIGEST-MD5 mechanism */
     // This code is pretty much completely based on Hadoop's
     // SaslRpcServer.SaslDigestCallbackHandler - the only reason we could not
@@ -650,7 +740,6 @@ public boolean process(final TProtocol inProt, final TProtocol outProt) throws T
         TSaslServerTransport saslTrans = (TSaslServerTransport)trans;
         SaslServer saslServer = saslTrans.getSaslServer();
         String authId = saslServer.getAuthorizationID();
-        authenticationMethod.set(AuthenticationMethod.KERBEROS);
         LOG.debug("AUTH ID ======>" + authId);
         String endUser = authId;
 
@@ -664,6 +753,12 @@ public boolean process(final TProtocol inProt, final TProtocol outProt) throws T
             throw new TException(e.getMessage());
           }
         }
+        else if (saslServer.getMechanismName().equals("GSSAPI")) {
+          authenticationMethod.set(AuthenticationMethod.KERBEROS);
+        }
+        else if (saslServer.getMechanismName().equals("PLAIN")) {
+          LOG.debug("Received a request for the custom authentication module");
+        }
         Socket socket = ((TSocket)(saslTrans.getUnderlyingTransport())).getSocket();
         remoteAddress.set(socket.getInetAddress());
         UserGroupInformation clientUgi = null;
diff --git shims/common/src/main/java/org/apache/hadoop/hive/thrift/KrbCustomAuthenticationProvider.java shims/common/src/main/java/org/apache/hadoop/hive/thrift/KrbCustomAuthenticationProvider.java
new file mode 100644
index 0000000..1fea6aa
--- /dev/null
+++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/KrbCustomAuthenticationProvider.java
@@ -0,0 +1,21 @@
+package org.apache.hadoop.hive.thrift;
+
+import javax.security.sasl.AuthenticationException;
+
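+/*
+ * Example wiring in hive-site.xml (values are illustrative; the class name below
+ * is a hypothetical implementation of this interface, packaged on HiveServer2's
+ * classpath):
+ *
+ *   hive.server2.authentication                           = KERBEROS
+ *   hive.server2.kerberos.use.SSL                         = true
+ *   hive.server2.kerberos.ssl.port                        = 10010
+ *   hive.server2.kerberos.ssl.keystore.path               = /path/to/keystore.jks
+ *   hive.server2.kerberos.ssl.keystore.password           = <keystore-password>
+ *   hive.server2.kerberos.ssl.custom.authentication.class = com.example.auth.MyKrbCustomAuthProvider
+ */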
+public interface KrbCustomAuthenticationProvider {
+  /**
+   * The Authenticate method is called by the Thrift (HiveServer2, Hive metastore)
+   * authentication layer to authenticate users for their requests (server side).
+   * It follows the same pattern as PasswdAuthenticationProvider:
+   * if the user is granted access, simply return;
+   * if the user is to be disallowed, throw an appropriate {@link AuthenticationException}.
+   *
+   * For an example implementation, see Hive's LdapAuthenticationProviderImpl.
+   *
+   * @param user - The username received over the connection request
+   * @param password - The password received over the connection request
+   * @throws AuthenticationException - When a user is found to be
+   * invalid by the implementation
+   */
+  void Authenticate(String user, String password) throws AuthenticationException;
+}