Index: security/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
===================================================================
--- security/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java	(revision 1326827)
+++ security/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java	(working copy)
@@ -32,12 +32,10 @@
 import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
 import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
 import org.apache.hadoop.hbase.ipc.HBaseRPC;
+import org.apache.hadoop.hbase.ipc.HBaseServer;
 import org.apache.hadoop.hbase.ipc.RequestContext;
 import org.apache.hadoop.hbase.ipc.RpcServer;
-import org.apache.hadoop.hbase.ipc.SecureRpcEngine;
-import org.apache.hadoop.hbase.ipc.SecureServer;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -65,11 +63,7 @@
     }

     public String getAuthMethod() {
-      UserGroupInformation ugi = null;
-      User user = RequestContext.getRequestUser();
-      if (user != null) {
-        ugi = user.getUGI();
-      }
+      UserGroupInformation ugi = RequestContext.getRequestUser();
       if (ugi != null) {
         return ugi.getAuthenticationMethod().toString();
       }
@@ -84,15 +78,13 @@
   public static void setupBeforeClass() throws Exception {
     TEST_UTIL = new HBaseTestingUtility();
     Configuration conf = TEST_UTIL.getConfiguration();
-    conf.set(HBaseRPC.RPC_ENGINE_PROP, SecureRpcEngine.class.getName());
     conf.set("hbase.coprocessor.region.classes", IdentityCoprocessor.class.getName());
     TEST_UTIL.startMiniCluster();
     HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);
     RpcServer server = rs.getRpcServer();
-    assertTrue(server instanceof SecureServer);
-    SecretManager mgr =
-        ((SecureServer)server).getSecretManager();
+    SecretManager mgr =
+        ((HBaseServer)server).getSecretManager();
     assertTrue(mgr instanceof AuthenticationTokenSecretManager);
     secretManager = (AuthenticationTokenSecretManager)mgr;
   }
Index: security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
===================================================================
--- security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java	(revision 1326827)
+++ security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java	(working copy)
@@ -21,8 +21,7 @@
 import java.io.IOException;

 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.ipc.SecureRpcEngine;
-import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.security.UserGroupInformation;

 /**
  * Utility methods for testing security
@@ -31,11 +30,10 @@
   public static void enableSecurity(Configuration conf) throws IOException {
     conf.set("hadoop.security.authorization", "false");
     conf.set("hadoop.security.authentication", "simple");
-    conf.set("hbase.rpc.engine", SecureRpcEngine.class.getName());
     conf.set("hbase.coprocessor.master.classes", AccessController.class.getName());
     conf.set("hbase.coprocessor.region.classes", AccessController.class.getName());
     // add the process running user to superusers
-    String currentUser = User.getCurrent().getName();
+    String currentUser = UserGroupInformation.getCurrentUser().getUserName();
     conf.set("hbase.superuser", "admin,"+currentUser);
   }
 }
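[Editor's note: the recurring change in this patch's test code is a mechanical swap from HBase's org.apache.hadoop.hbase.security.User wrapper to Hadoop's UserGroupInformation (UGI): createUserForTesting() loses its Configuration argument, runAs() becomes the standard doAs(), and getShortName()/getName() become getShortUserName()/getUserName(). A minimal, self-contained sketch of that pattern — the class name UgiMigrationSketch is hypothetical, the API calls are the ones the patch uses:]

```java
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiMigrationSketch {
  public static void main(String[] args) throws Exception {
    // Before: User.createUserForTesting(conf, "rwuser", new String[0]);
    // the HBase wrapper needed a Configuration to pick an implementation.
    // After: the UGI is created directly, no Configuration required.
    UserGroupInformation rwUser =
        UserGroupInformation.createUserForTesting("rwuser", new String[0]);

    // Before: user.runAs(action). After: the standard UGI doAs().
    String shortName = rwUser.doAs(new PrivilegedExceptionAction<String>() {
      public String run() throws Exception {
        // getShortUserName() replaces User.getShortName()
        return UserGroupInformation.getCurrentUser().getShortUserName();
      }
    });
    System.out.println(shortName); // prints "rwuser"
  }
}
```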
Index: security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
===================================================================
--- security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java	(revision 1326827)
+++ security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java	(working copy)
@@ -53,8 +53,8 @@
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -71,15 +71,15 @@
   private static Configuration conf;

   // user with all permissions
-  private static User SUPERUSER;
+  private static UserGroupInformation SUPERUSER;
   // table owner user
-  private static User USER_OWNER;
+  private static UserGroupInformation USER_OWNER;
   // user with rw permissions
-  private static User USER_RW;
+  private static UserGroupInformation USER_RW;
   // user with read-only permissions
-  private static User USER_RO;
+  private static UserGroupInformation USER_RO;
   // user with no permissions
-  private static User USER_NONE;
+  private static UserGroupInformation USER_NONE;

   private static byte[] TEST_TABLE = Bytes.toBytes("testtable");
   private static byte[] TEST_FAMILY = Bytes.toBytes("f1");
@@ -103,27 +103,27 @@
       Coprocessor.PRIORITY_HIGHEST, 1, conf);

     // create a set of test users
-    SUPERUSER = User.createUserForTesting(conf, "admin", new String[]{"supergroup"});
-    USER_OWNER = User.createUserForTesting(conf, "owner", new String[0]);
-    USER_RW = User.createUserForTesting(conf, "rwuser", new String[0]);
-    USER_RO = User.createUserForTesting(conf, "rouser", new String[0]);
-    USER_NONE = User.createUserForTesting(conf, "nouser", new String[0]);
+    SUPERUSER = UserGroupInformation.createUserForTesting("admin", new String[]{"supergroup"});
+    USER_OWNER = UserGroupInformation.createUserForTesting("owner", new String[0]);
+    USER_RW = UserGroupInformation.createUserForTesting("rwuser", new String[0]);
+    USER_RO = UserGroupInformation.createUserForTesting("rouser", new String[0]);
+    USER_NONE = UserGroupInformation.createUserForTesting("nouser", new String[0]);

     HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
     HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
     htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
-    htd.setOwnerString(USER_OWNER.getShortName());
+    htd.setOwnerString(USER_OWNER.getShortUserName());
     admin.createTable(htd);

     // initilize access control
     HTable meta = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
     AccessControllerProtocol protocol =
         meta.coprocessorProxy(AccessControllerProtocol.class, TEST_TABLE);
-    protocol.grant(Bytes.toBytes(USER_RW.getShortName()),
+    protocol.grant(Bytes.toBytes(USER_RW.getShortUserName()),
         new TablePermission(TEST_TABLE, TEST_FAMILY, Permission.Action.READ,
             Permission.Action.WRITE));
-    protocol.grant(Bytes.toBytes(USER_RO.getShortName()),
+    protocol.grant(Bytes.toBytes(USER_RO.getShortUserName()),
         new TablePermission(TEST_TABLE, TEST_FAMILY, Permission.Action.READ));
   }
@@ -132,28 +132,31 @@
     TEST_UTIL.shutdownMiniCluster();
   }

-  public void verifyAllowed(User user, PrivilegedExceptionAction action)
-    throws Exception {
+  public void verifyAllowed(UserGroupInformation user,
+      PrivilegedExceptionAction action) throws Exception {
     try {
-      user.runAs(action);
+      user.doAs(action);
     } catch (AccessDeniedException ade) {
-      fail("Expected action to pass for user '" + user.getShortName() +
+      fail("Expected action to pass for user '" + user.getShortUserName() +
           "' but was denied");
     }
   }

-  public void verifyAllowed(PrivilegedExceptionAction action, User... users)
+  public void verifyAllowed(PrivilegedExceptionAction action,
+      UserGroupInformation... users)
     throws Exception {
-    for (User user : users) {
+    for (UserGroupInformation user : users) {
       verifyAllowed(user, action);
     }
   }

-  public void verifyDenied(User user, PrivilegedExceptionAction action)
+  public void verifyDenied(UserGroupInformation user,
+      PrivilegedExceptionAction action)
     throws Exception {
     try {
-      user.runAs(action);
-      fail("Expected AccessDeniedException for user '" + user.getShortName() + "'");
+      user.doAs(action);
+      fail("Expected AccessDeniedException for user '" + user.getShortUserName() +
+          "'");
     } catch (RetriesExhaustedWithDetailsException e) {
       // in case of batch operations, and put, the client assembles a
       // RetriesExhaustedWithDetailsException instead of throwing an
@@ -167,16 +170,16 @@
       }
       if (!isAccessDeniedException ) {
         fail("Not receiving AccessDeniedException for user '" +
-            user.getShortName() + "'");
+            user.getShortUserName() + "'");
       }
     } catch (AccessDeniedException ade) {
       // expected result
     }
   }

-  public void verifyDenied(PrivilegedExceptionAction action, User... users)
-    throws Exception {
-    for (User user : users) {
+  public void verifyDenied(PrivilegedExceptionAction action,
+      UserGroupInformation... users) throws Exception {
+    for (UserGroupInformation user : users) {
       verifyDenied(user, action);
     }
   }
@@ -209,7 +212,8 @@
       public Object run() throws Exception {
         HTableDescriptor htd = new HTableDescriptor(TEST_TABLE);
         htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
-        htd.addFamily(new HColumnDescriptor("fam_"+User.getCurrent().getShortName()));
+        htd.addFamily(new HColumnDescriptor("fam_"+
+            UserGroupInformation.getCurrentUser().getShortUserName()));
         ACCESS_CONTROLLER.preModifyTable(ObserverContext.createAndPrepare(CP_ENV, null), TEST_TABLE, htd);
         return null;
       }
@@ -607,11 +611,11 @@
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(family1));
     htd.addFamily(new HColumnDescriptor(family2));
-    htd.setOwnerString(USER_OWNER.getShortName());
+    htd.setOwnerString(USER_OWNER.getShortUserName());
     admin.createTable(htd);

     // create temp users
-    User user = User.createUserForTesting(TEST_UTIL.getConfiguration(),
+    UserGroupInformation user = UserGroupInformation.createUserForTesting(
         "user", new String[0]);

     // perms only stored against the first region
@@ -720,7 +724,7 @@
     verifyDenied(user, deleteAction2);

     // grant table read permission
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, null, Permission.Action.READ));
     Thread.sleep(100);
     // check
@@ -737,7 +741,7 @@
     verifyDenied(user, deleteAction2);

     // grant table write permission
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, null, Permission.Action.WRITE));
     Thread.sleep(100);
     verifyDenied(user, getActionAll);
@@ -753,11 +757,11 @@
     verifyAllowed(user, deleteAction2);

     // revoke table permission
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, null, Permission.Action.READ,
             Permission.Action.WRITE));
-    protocol.revoke(Bytes.toBytes(user.getShortName()),
+    protocol.revoke(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, null));
     Thread.sleep(100);
     verifyDenied(user, getActionAll);
@@ -773,7 +777,7 @@
     verifyDenied(user, deleteAction2);

     // grant column family read permission
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family1, Permission.Action.READ));
     Thread.sleep(100);
@@ -790,7 +794,7 @@
     verifyDenied(user, deleteAction2);

     // grant column family write permission
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family2, Permission.Action.WRITE));
     Thread.sleep(100);
@@ -807,7 +811,7 @@
     verifyAllowed(user, deleteAction2);

     // revoke column family permission
-    protocol.revoke(Bytes.toBytes(user.getShortName()),
+    protocol.revoke(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family2));
     Thread.sleep(100);
@@ -850,11 +854,11 @@
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(family1));
     htd.addFamily(new HColumnDescriptor(family2));
-    htd.setOwnerString(USER_OWNER.getShortName());
+    htd.setOwnerString(USER_OWNER.getShortUserName());
     admin.createTable(htd);

     // create temp users
-    User user = User.createUserForTesting(TEST_UTIL.getConfiguration(),
+    UserGroupInformation user = UserGroupInformation.createUserForTesting(
         "user", new String[0]);

     HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
@@ -890,13 +894,13 @@
       }
     };

-    protocol.revoke(Bytes.toBytes(user.getShortName()),
+    protocol.revoke(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family1));
     verifyDenied(user, getQualifierAction);
     verifyDenied(user, putQualifierAction);
     verifyDenied(user, deleteQualifierAction);

-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family1, qualifier,
             Permission.Action.READ));
     Thread.sleep(100);
@@ -907,7 +911,7 @@
     // only grant write permission
     // TODO: comment this portion after HBASE-3583
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family1, qualifier,
             Permission.Action.WRITE));
     Thread.sleep(100);
@@ -917,7 +921,7 @@
     verifyAllowed(user, deleteQualifierAction);

     // grant both read and write permission.
-    protocol.grant(Bytes.toBytes(user.getShortName()),
+    protocol.grant(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family1, qualifier,
             Permission.Action.READ, Permission.Action.WRITE));
     Thread.sleep(100);
@@ -927,7 +931,7 @@
     verifyAllowed(user, deleteQualifierAction);

     // revoke family level permission won't impact column level.
-    protocol.revoke(Bytes.toBytes(user.getShortName()),
+    protocol.revoke(Bytes.toBytes(user.getShortUserName()),
         new TablePermission(tableName, family1, qualifier));
     Thread.sleep(100);
@@ -957,7 +961,7 @@
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(family1));
     htd.addFamily(new HColumnDescriptor(family2));
-    htd.setOwnerString(USER_OWNER.getShortName());
+    htd.setOwnerString(USER_OWNER.getShortUserName());
     admin.createTable(htd);

     HTable acl = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
@@ -1051,9 +1055,10 @@
     protocol.checkPermissions(perms);
   }

-  public void grant(AccessControllerProtocol protocol, User user, byte[] t, byte[] f,
+  public void grant(AccessControllerProtocol protocol,
+      UserGroupInformation user, byte[] t, byte[] f,
       byte[] q, Permission.Action... actions) throws IOException {
-    protocol.grant(Bytes.toBytes(user.getShortName()), new TablePermission(t, f, q, actions));
+    protocol.grant(Bytes.toBytes(user.getShortUserName()), new TablePermission(t, f, q, actions));
   }

   @Test
@@ -1091,9 +1096,13 @@
     final byte[] TEST_Q1 = Bytes.toBytes("q1");
     final byte[] TEST_Q2 = Bytes.toBytes("q2");

-    User userTable = User.createUserForTesting(conf, "user_check_perms_table", new String[0]);
-    User userColumn = User.createUserForTesting(conf, "user_check_perms_family", new String[0]);
-    User userQualifier = User.createUserForTesting(conf, "user_check_perms_q", new String[0]);
+    UserGroupInformation userTable = UserGroupInformation
+        .createUserForTesting("user_check_perms_table", new String[0]);
+    UserGroupInformation userColumn = UserGroupInformation
+        .createUserForTesting("user_check_perms_family",
+            new String[0]);
+    UserGroupInformation userQualifier = UserGroupInformation
+        .createUserForTesting("user_check_perms_q", new String[0]);

     grant(protocol, userTable, TEST_TABLE, null, null, Permission.Action.READ);
     grant(protocol, userColumn, TEST_TABLE, TEST_FAMILY, null, Permission.Action.READ);
Index: security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
===================================================================
--- security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java	(revision 1326827)
+++ security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java	(working copy)
@@ -39,8 +39,8 @@
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -51,10 +51,10 @@
   private static Log LOG = LogFactory.getLog(TestAccessControlFilter.class);
   private static HBaseTestingUtility TEST_UTIL;

-  private static User ADMIN;
-  private static User READER;
-  private static User LIMITED;
-  private static User DENIED;
+  private static UserGroupInformation ADMIN;
+  private static UserGroupInformation READER;
+  private static UserGroupInformation LIMITED;
+  private static UserGroupInformation DENIED;

   private static byte[] TABLE = Bytes.toBytes("testtable");
   private static byte[] FAMILY = Bytes.toBytes("f1");
@@ -66,16 +66,16 @@
     TEST_UTIL = new HBaseTestingUtility();
     Configuration conf = TEST_UTIL.getConfiguration();
     SecureTestUtil.enableSecurity(conf);
-    String baseuser = User.getCurrent().getShortName();
+    String baseuser = UserGroupInformation.getCurrentUser().getShortUserName();
     conf.set("hbase.superuser", conf.get("hbase.superuser", "") +
         String.format(",%s.hfs.0,%s.hfs.1,%s.hfs.2", baseuser, baseuser, baseuser));
     TEST_UTIL.startMiniCluster();
     TEST_UTIL.waitTableAvailable(AccessControlLists.ACL_TABLE_NAME, 5000);

-    ADMIN = User.createUserForTesting(conf, "admin", new String[]{"supergroup"});
-    READER = User.createUserForTesting(conf, "reader", new String[0]);
-    LIMITED = User.createUserForTesting(conf, "limited", new String[0]);
-    DENIED = User.createUserForTesting(conf, "denied", new String[0]);
+    ADMIN = UserGroupInformation.createUserForTesting("admin", new String[]{"supergroup"});
+    READER = UserGroupInformation.createUserForTesting("reader", new String[0]);
+    LIMITED = UserGroupInformation.createUserForTesting("limited", new String[0]);
+    DENIED = UserGroupInformation.createUserForTesting("denied", new String[0]);
   }

   @AfterClass
@@ -88,7 +88,7 @@
     final HTable table = TEST_UTIL.createTable(TABLE, FAMILY);

     // set permissions
-    ADMIN.runAs(new PrivilegedExceptionAction() {
+    ADMIN.doAs(new PrivilegedExceptionAction() {
       @Override
       public Object run() throws Exception {
         HTable aclmeta = new HTable(TEST_UTIL.getConfiguration(),
@@ -96,9 +96,9 @@
         AccessControllerProtocol acls = aclmeta.coprocessorProxy(
             AccessControllerProtocol.class, Bytes.toBytes("testtable"));
         TablePermission perm = new TablePermission(TABLE, null, Permission.Action.READ);
-        acls.grant(Bytes.toBytes(READER.getShortName()), perm);
+        acls.grant(Bytes.toBytes(READER.getShortUserName()), perm);
         perm = new TablePermission(TABLE, FAMILY, PUBLIC_COL, Permission.Action.READ);
-        acls.grant(Bytes.toBytes(LIMITED.getShortName()), perm);
+        acls.grant(Bytes.toBytes(LIMITED.getShortUserName()), perm);
         return null;
       }
     });
@@ -114,7 +114,7 @@
     table.put(puts);

     // test read
-    READER.runAs(new PrivilegedExceptionAction() {
+    READER.doAs(new PrivilegedExceptionAction() {
       public Object run() throws Exception {
         Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
         // force a new RS connection
@@ -136,7 +136,7 @@
     });

     // test read with qualifier filter
-    LIMITED.runAs(new PrivilegedExceptionAction() {
+    LIMITED.doAs(new PrivilegedExceptionAction() {
       public Object run() throws Exception {
         Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
         // force a new RS connection
@@ -157,7 +157,7 @@
     });

     // test as user with no permission
-    DENIED.runAs(new PrivilegedExceptionAction(){
+    DENIED.doAs(new PrivilegedExceptionAction(){
       public Object run() throws Exception {
         try {
           Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
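[Editor's note: the TokenProvider change below drops the User-to-UGI unwrapping step because RequestContext.getRequestUser() now returns a UserGroupInformation directly. A hedged sketch of the resulting Kerberos gate — RequestContext and AccessDeniedException are HBase's, the helper class itself is hypothetical:]

```java
import org.apache.hadoop.hbase.ipc.RequestContext;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical helper mirroring the post-patch TokenProvider logic: the
// request user is already a UGI, so its auth method can be checked directly.
class KerberosGateSketch {
  static UserGroupInformation requireKerberosUser() throws AccessDeniedException {
    UserGroupInformation currentUser = RequestContext.getRequestUser();
    if (currentUser == null) {
      throw new AccessDeniedException("No authenticated user for request!");
    }
    if (currentUser.getAuthenticationMethod() !=
        UserGroupInformation.AuthenticationMethod.KERBEROS) {
      throw new AccessDeniedException(
          "Token generation only allowed for Kerberos authenticated clients");
    }
    return currentUser;
  }
}
```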
Index: security/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
===================================================================
--- security/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java	(revision 1326827)
+++ security/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java	(working copy)
@@ -25,11 +25,10 @@
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.ipc.HBaseServer;
 import org.apache.hadoop.hbase.ipc.RequestContext;
 import org.apache.hadoop.hbase.ipc.RpcServer;
-import org.apache.hadoop.hbase.ipc.SecureServer;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.Token;
@@ -56,11 +55,9 @@
       RegionCoprocessorEnvironment regionEnv =
           (RegionCoprocessorEnvironment)env;
       RpcServer server = regionEnv.getRegionServerServices().getRpcServer();
-      if (server instanceof SecureServer) {
-        SecretManager mgr = ((SecureServer)server).getSecretManager();
-        if (mgr instanceof AuthenticationTokenSecretManager) {
-          secretManager = (AuthenticationTokenSecretManager)mgr;
-        }
+      SecretManager mgr = ((HBaseServer)server).getSecretManager();
+      if (mgr instanceof AuthenticationTokenSecretManager) {
+        secretManager = (AuthenticationTokenSecretManager)mgr;
+      }
     }
   }
@@ -73,22 +70,18 @@
           "No secret manager configured for token authentication");
     }

-    User currentUser = RequestContext.getRequestUser();
-    UserGroupInformation ugi = null;
-    if (currentUser != null) {
-      ugi = currentUser.getUGI();
-    }
+    UserGroupInformation currentUser = RequestContext.getRequestUser();
     if (currentUser == null) {
       throw new AccessDeniedException("No authenticated user for request!");
-    } else if (ugi.getAuthenticationMethod() !=
+    } else if (currentUser.getAuthenticationMethod() !=
         UserGroupInformation.AuthenticationMethod.KERBEROS) {
-      LOG.warn("Token generation denied for user="+currentUser.getName()
-          +", authMethod="+ugi.getAuthenticationMethod());
+      LOG.warn("Token generation denied for user="+currentUser.getUserName()
+          +", authMethod="+currentUser.getAuthenticationMethod());
       throw new AccessDeniedException(
           "Token generation only allowed for Kerberos authenticated clients");
     }

-    return secretManager.generateToken(currentUser.getName());
+    return secretManager.generateToken(currentUser.getUserName());
   }

   @Override
Index: security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
===================================================================
--- security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java	(revision 1326827)
+++ security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java	(working copy)
@@ -1,280 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.security;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.callback.NameCallback;
-import javax.security.auth.callback.PasswordCallback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.RealmCallback;
-import javax.security.sasl.Sasl;
-
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.ipc.HBaseServer;
-import org.apache.hadoop.hbase.ipc.SecureServer;
-import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.SecretManager;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-
-/**
- * A utility class for dealing with SASL on RPC server
- */
-public class HBaseSaslRpcServer {
-  public static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class);
-  public static final String SASL_DEFAULT_REALM = "default";
-  public static final Map<String, String> SASL_PROPS =
-      new TreeMap<String, String>();
-
-  public static final int SWITCH_TO_SIMPLE_AUTH = -88;
-
-  public static enum QualityOfProtection {
-    AUTHENTICATION("auth"),
-    INTEGRITY("auth-int"),
-    PRIVACY("auth-conf");
-
-    public final String saslQop;
-
-    private QualityOfProtection(String saslQop) {
-      this.saslQop = saslQop;
-    }
-
-    public String getSaslQop() {
-      return saslQop;
-    }
-  }
-
-  public static void init(Configuration conf) {
-    QualityOfProtection saslQOP = QualityOfProtection.AUTHENTICATION;
-    String rpcProtection = conf.get("hbase.rpc.protection",
-        QualityOfProtection.AUTHENTICATION.name().toLowerCase());
-    if (QualityOfProtection.INTEGRITY.name().toLowerCase()
-        .equals(rpcProtection)) {
-      saslQOP = QualityOfProtection.INTEGRITY;
-    } else if (QualityOfProtection.PRIVACY.name().toLowerCase().equals(
-        rpcProtection)) {
-      saslQOP = QualityOfProtection.PRIVACY;
-    }
-
-    SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
-    SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
-  }
-
-  static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier));
-  }
-
-  static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes());
-  }
-
-  public static <T extends TokenIdentifier> T getIdentifier(String id,
-      SecretManager<T> secretManager) throws InvalidToken {
-    byte[] tokenId = decodeIdentifier(id);
-    T tokenIdentifier = secretManager.createIdentifier();
-    try {
-      tokenIdentifier.readFields(new DataInputStream(new ByteArrayInputStream(
-          tokenId)));
-    } catch (IOException e) {
-      throw (InvalidToken) new InvalidToken(
-          "Can't de-serialize tokenIdentifier").initCause(e);
-    }
-    return tokenIdentifier;
-  }
-
-  static char[] encodePassword(byte[] password) {
-    return new String(Base64.encodeBase64(password)).toCharArray();
-  }
-
-  /** Splitting fully qualified Kerberos name into parts */
-  public static String[] splitKerberosName(String fullName) {
-    return fullName.split("[/@]");
-  }
-
-  public enum SaslStatus {
-    SUCCESS (0),
-    ERROR (1);
-
-    public final int state;
-    private SaslStatus(int state) {
-      this.state = state;
-    }
-  }
-
-  /** Authentication method */
-  public static enum AuthMethod {
-    SIMPLE((byte) 80, "", AuthenticationMethod.SIMPLE),
-    KERBEROS((byte) 81, "GSSAPI", AuthenticationMethod.KERBEROS),
-    DIGEST((byte) 82, "DIGEST-MD5", AuthenticationMethod.TOKEN);
-
-    /** The code for this method. */
-    public final byte code;
-    public final String mechanismName;
-    public final AuthenticationMethod authenticationMethod;
-
-    private AuthMethod(byte code, String mechanismName,
-        AuthenticationMethod authMethod) {
-      this.code = code;
-      this.mechanismName = mechanismName;
-      this.authenticationMethod = authMethod;
-    }
-
-    private static final int FIRST_CODE = values()[0].code;
-
-    /** Return the object represented by the code. */
-    private static AuthMethod valueOf(byte code) {
-      final int i = (code & 0xff) - FIRST_CODE;
-      return i < 0 || i >= values().length ? null : values()[i];
-    }
-
-    /** Return the SASL mechanism name */
-    public String getMechanismName() {
-      return mechanismName;
-    }
-
-    /** Read from in */
-    public static AuthMethod read(DataInput in) throws IOException {
-      return valueOf(in.readByte());
-    }
-
-    /** Write to out */
-    public void write(DataOutput out) throws IOException {
-      out.write(code);
-    }
-  };
-
-  /** CallbackHandler for SASL DIGEST-MD5 mechanism */
-  public static class SaslDigestCallbackHandler implements CallbackHandler {
-    private SecretManager<TokenIdentifier> secretManager;
-    private SecureServer.SecureConnection connection;
-
-    public SaslDigestCallbackHandler(
-        SecretManager<TokenIdentifier> secretManager,
-        SecureServer.SecureConnection connection) {
-      this.secretManager = secretManager;
-      this.connection = connection;
-    }
-
-    private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken {
-      return encodePassword(secretManager.retrievePassword(tokenid));
-    }
-
-    /** {@inheritDoc} */
-    @Override
-    public void handle(Callback[] callbacks) throws InvalidToken,
-        UnsupportedCallbackException {
-      NameCallback nc = null;
-      PasswordCallback pc = null;
-      AuthorizeCallback ac = null;
-      for (Callback callback : callbacks) {
-        if (callback instanceof AuthorizeCallback) {
-          ac = (AuthorizeCallback) callback;
-        } else if (callback instanceof NameCallback) {
-          nc = (NameCallback) callback;
-        } else if (callback instanceof PasswordCallback) {
-          pc = (PasswordCallback) callback;
-        } else if (callback instanceof RealmCallback) {
-          continue; // realm is ignored
-        } else {
-          throw new UnsupportedCallbackException(callback,
-              "Unrecognized SASL DIGEST-MD5 Callback");
-        }
-      }
-      if (pc != null) {
-        TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
-        char[] password = getPassword(tokenIdentifier);
-        UserGroupInformation user = null;
-        user = tokenIdentifier.getUser(); // may throw exception
-        connection.attemptingUser = user;
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("SASL server DIGEST-MD5 callback: setting password "
-              + "for client: " + tokenIdentifier.getUser());
-        }
-        pc.setPassword(password);
-      }
-      if (ac != null) {
-        String authid = ac.getAuthenticationID();
-        String authzid = ac.getAuthorizationID();
-        if (authid.equals(authzid)) {
-          ac.setAuthorized(true);
-        } else {
-          ac.setAuthorized(false);
-        }
-        if (ac.isAuthorized()) {
-          if (LOG.isDebugEnabled()) {
-            String username =
-                getIdentifier(authzid, secretManager).getUser().getUserName();
-            LOG.debug("SASL server DIGEST-MD5 callback: setting "
-                + "canonicalized client ID: " + username);
-          }
-          ac.setAuthorizedID(authzid);
-        }
-      }
-    }
-  }
-
-  /** CallbackHandler for SASL GSSAPI Kerberos mechanism */
-  public static class SaslGssCallbackHandler implements CallbackHandler {
-
-    /** {@inheritDoc} */
-    @Override
-    public void handle(Callback[] callbacks) throws
-        UnsupportedCallbackException {
-      AuthorizeCallback ac = null;
-      for (Callback callback : callbacks) {
-        if (callback instanceof AuthorizeCallback) {
-          ac = (AuthorizeCallback) callback;
-        } else {
-          throw new UnsupportedCallbackException(callback,
-              "Unrecognized SASL GSSAPI Callback");
-        }
-      }
-      if (ac != null) {
-        String authid = ac.getAuthenticationID();
-        String authzid = ac.getAuthorizationID();
-        if (authid.equals(authzid)) {
-          ac.setAuthorized(true);
-        } else {
-          ac.setAuthorized(false);
-        }
-        if (ac.isAuthorized()) {
-          if (LOG.isDebugEnabled())
-            LOG.debug("SASL server GSSAPI callback: setting "
-                + "canonicalized client ID: " + authzid);
-          ac.setAuthorizedID(authzid);
-        }
-      }
-    }
-  }
-}
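[Editor's note: the file deleted above mapped the hbase.rpc.protection setting onto the standard javax.security.sasl QOP properties; that behavior presumably lives on in the consolidated core RPC code this patch relies on. A standalone, hedged restatement of that mapping — the class name is hypothetical, the property keys and values come from the deleted init() method:]

```java
import java.util.Map;
import java.util.TreeMap;
import javax.security.sasl.Sasl;
import org.apache.hadoop.conf.Configuration;

// Hypothetical re-statement of the deleted HBaseSaslRpcServer.init() logic:
// "authentication" -> auth, "integrity" -> auth-int, "privacy" -> auth-conf.
class SaslQopSketch {
  static Map<String, String> saslProps(Configuration conf) {
    String protection = conf.get("hbase.rpc.protection", "authentication");
    String qop = "auth";                 // authentication only
    if ("integrity".equals(protection)) {
      qop = "auth-int";                  // plus integrity protection
    } else if ("privacy".equals(protection)) {
      qop = "auth-conf";                 // plus confidentiality
    }
    Map<String, String> props = new TreeMap<String, String>();
    props.put(Sasl.QOP, qop);
    props.put(Sasl.SERVER_AUTH, "true"); // require mutual authentication
    return props;
  }
}
```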
Index: security/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
===================================================================
--- security/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java	(revision 1326827)
+++ security/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java	(working copy)
@@ -25,9 +25,9 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.zookeeper.KeeperException;

 import java.io.*;
@@ -75,12 +75,12 @@
   }

   private void initGlobal(Configuration conf) throws IOException {
-    User user = User.getCurrent();
+    UserGroupInformation user = UserGroupInformation.getCurrentUser();
     if (user == null) {
       throw new IOException("Unable to obtain the current user, " +
           "authorization checks for internal operations will not work correctly!");
     }
-    String currentUser = user.getShortName();
+    String currentUser = user.getShortUserName();

     // the system user is always included
     List superusers = Lists.asList(currentUser, conf.getStrings(
@@ -186,12 +186,13 @@
    * @param action
    * @return
    */
-  public boolean authorize(User user, Permission.Action action) {
+  public boolean authorize(UserGroupInformation user,
+      Permission.Action action) {
     if (user == null) {
       return false;
     }

-    if (authorize(USER_CACHE.get(user.getShortName()), action)) {
+    if (authorize(USER_CACHE.get(user.getShortUserName()), action)) {
       return true;
     }

@@ -225,10 +226,10 @@
     return false;
   }

-  public boolean authorize(User user, byte[] table, KeyValue kv,
+  public boolean authorize(UserGroupInformation user, byte[] table, KeyValue kv,
       TablePermission.Action action) {
     List userPerms = getUserPermissions(
-        user.getShortName(), table);
+        user.getShortUserName(), table);
     if (authorize(userPerms, table, kv, action)) {
       return true;
     }
@@ -322,9 +323,10 @@
     return authorize(getGroupPermissions(groupName, table), table, family, action);
   }

-  public boolean authorize(User user, byte[] table, byte[] family,
-      byte[] qualifier, Permission.Action action) {
-    if (authorizeUser(user.getShortName(), table, family, qualifier, action)) {
+  public boolean authorize(UserGroupInformation user, byte[] table,
+      byte[] family, byte[] qualifier, Permission.Action action) {
+    if (authorizeUser(user.getShortUserName(), table, family, qualifier,
+        action)) {
       return true;
     }
@@ -339,8 +341,8 @@
     return false;
   }

-  public boolean authorize(User user, byte[] table, byte[] family,
-      Permission.Action action) {
+  public boolean authorize(UserGroupInformation user, byte[] table,
+      byte[] family, Permission.Action action) {
     return authorize(user, table, family, null, action);
   }

@@ -350,10 +352,10 @@
    * may be scoped to a given column qualifier and does not guarantee that
    * authorize() on the same column family would return true.
    */
-  public boolean matchPermission(User user,
+  public boolean matchPermission(UserGroupInformation user,
       byte[] table, byte[] family, TablePermission.Action action) {
     List userPerms = getUserPermissions(
-        user.getShortName(), table);
+        user.getShortUserName(), table);
     if (userPerms != null) {
       for (TablePermission p : userPerms) {
         if (p.matchesFamily(table, family, action)) {
@@ -379,11 +381,11 @@
     return false;
   }

-  public boolean matchPermission(User user,
+  public boolean matchPermission(UserGroupInformation user,
       byte[] table, byte[] family, byte[] qualifier,
       TablePermission.Action action) {
     List userPerms = getUserPermissions(
-        user.getShortName(), table);
+        user.getShortUserName(), table);
     if (userPerms != null) {
       for (TablePermission p : userPerms) {
         if (p.matchesFamilyQualifier(table, family, qualifier, action)) {
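[Editor's note: with the signature changes above, callers hand TableAuthManager a UGI rather than an HBase User, and short-name resolution happens inside the manager. A minimal illustrative call site under assumptions — the table and family literals and the sketch class are hypothetical, the authorize() overload is the one added by the patch:]

```java
import org.apache.hadoop.hbase.ipc.RequestContext;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.access.TableAuthManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical call site for the post-patch TableAuthManager API.
class AuthorizeSketch {
  static boolean canRead(TableAuthManager authManager) {
    UserGroupInformation ugi = RequestContext.getRequestUser();
    // The UGI is passed straight through; the manager resolves the
    // short name internally via getShortUserName().
    return authManager.authorize(ugi, Bytes.toBytes("testtable"),
        Bytes.toBytes("f1"), Permission.Action.READ);
  }
}
```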
Index: security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
===================================================================
--- security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java	(revision 1326827)
+++ security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java	(working copy)
@@ -54,8 +54,8 @@
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
-import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.security.UserGroupInformation;

 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
@@ -107,9 +107,9 @@
     private final byte[] qualifier;
     private final Permission.Action action;
     private final String reason;
-    private final User user;
+    private final UserGroupInformation user;

-    public AuthResult(boolean allowed, String reason, User user,
+    public AuthResult(boolean allowed, String reason, UserGroupInformation user,
         Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
       this.allowed = allowed;
       this.reason = reason;
@@ -122,12 +122,12 @@

     public boolean isAllowed() { return allowed; }

-    public User getUser() { return user; }
+    public UserGroupInformation getUser() { return user; }

     public String getReason() { return reason; }

     public String toContextString() {
-      return "(user=" + (user != null ? user.getName() : "UNKNOWN") + ", " +
+      return "(user=" + (user != null ? user.getUserName() : "UNKNOWN") + ", " +
           "scope=" + (table == null ? "GLOBAL" : Bytes.toString(table)) + ", " +
           "family=" + (family != null ? Bytes.toString(family) : "") + ", " +
           "qualifer=" + (qualifier != null ? Bytes.toString(qualifier) : "") + ", " +
@@ -139,17 +139,17 @@
           .append(toContextString()).toString();
     }

-    public static AuthResult allow(String reason, User user,
+    public static AuthResult allow(String reason, UserGroupInformation user,
         Permission.Action action, byte[] table) {
       return new AuthResult(true, reason, user, action, table, null, null);
     }

-    public static AuthResult deny(String reason, User user,
+    public static AuthResult deny(String reason, UserGroupInformation user,
         Permission.Action action, byte[] table) {
       return new AuthResult(false, reason, user, action, table, null, null);
     }

-    public static AuthResult deny(String reason, User user,
+    public static AuthResult deny(String reason, UserGroupInformation user,
         Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
       return new AuthResult(false, reason, user, action, table, family, qualifier);
     }
@@ -247,8 +247,8 @@
    * the request
    * @return
    */
-  AuthResult permissionGranted(User user, TablePermission.Action permRequest,
-      RegionCoprocessorEnvironment e,
+  AuthResult permissionGranted(UserGroupInformation user,
+      TablePermission.Action permRequest, RegionCoprocessorEnvironment e,
       Map<byte[], ? extends Collection<?>> families) {
     HRegionInfo hri = e.getRegion().getRegionInfo();
     HTableDescriptor htd = e.getRegion().getTableDesc();
@@ -269,7 +269,7 @@

     // 2. The table owner has full privileges
     String owner = htd.getOwnerString();
-    if (user.getShortName().equals(owner)) {
+    if (user.getShortUserName().equals(owner)) {
       // owner of the table has full access
       return AuthResult.allow("User is table owner", user, permRequest,
           hri.getTableName());
@@ -333,7 +333,8 @@
   private void logResult(AuthResult result) {
     if (AUDITLOG.isTraceEnabled()) {
       AUDITLOG.trace("Access " + (result.isAllowed() ? "allowed" : "denied") +
-          " for user " + (result.getUser() != null ? result.getUser().getShortName() : "UNKNOWN") +
+          " for user " + (result.getUser() != null ?
+              result.getUser().getShortUserName() : "UNKNOWN") +
          "; reason: " + result.getReason() +
          "; context: " + result.toContextString());
     }
@@ -344,11 +345,11 @@
    * If we are in the context of an RPC call, the remote user is used,
    * otherwise the currently logged in user is used.
    */
-  private User getActiveUser() throws IOException {
-    User user = RequestContext.getRequestUser();
+  private UserGroupInformation getActiveUser() throws IOException {
+    UserGroupInformation user = RequestContext.getRequestUser();
     if (!RequestContext.isInRequestContext()) {
       // for non-rpc handling, fallback to system user
-      user = User.getCurrent();
+      user = UserGroupInformation.getCurrentUser();
     }
     return user;
   }
@@ -360,14 +361,14 @@
    * @throws AccessDeniedException if authorization is denied
    */
   private void requirePermission(Permission.Action perm) throws IOException {
-    User user = getActiveUser();
+    UserGroupInformation user = getActiveUser();
     if (authManager.authorize(user, perm)) {
       logResult(AuthResult.allow("Global check allowed", user, perm, null));
     } else {
       logResult(AuthResult.deny("Global check failed", user, perm, null));
       throw new AccessDeniedException("Insufficient permissions for user '" +
-          (user != null ? user.getShortName() : "null") +"' (global, action=" +
-          perm.toString() + ")");
+          (user != null ? user.getShortUserName() : "null") +
+          "' (global, action=" + perm.toString() + ")");
     }
   }
@@ -402,7 +403,7 @@
       RegionCoprocessorEnvironment env,
       Map<byte[], ? extends Collection<?>> families)
     throws IOException {
-    User user = getActiveUser();
+    UserGroupInformation user = getActiveUser();
     AuthResult result = permissionGranted(user, perm, env, families);
     logResult(result);
@@ -428,7 +429,7 @@
    * Returns true if the current user is allowed the given action
    * over at least one of the column qualifiers in the given column families.
    */
-  private boolean hasFamilyQualifierPermission(User user,
+  private boolean hasFamilyQualifierPermission(UserGroupInformation user,
       TablePermission.Action perm,
       RegionCoprocessorEnvironment env,
       Map<byte[], ? extends Set<byte[]>> familyMap)
@@ -491,7 +492,7 @@
     requirePermission(Permission.Action.CREATE);

     // default the table owner if not specified
-    User owner = getActiveUser();
+    UserGroupInformation owner = getActiveUser();
     if (desc.getOwnerString() == null ||
         desc.getOwnerString().equals("")) {
       desc.setOwner(owner);
@@ -688,7 +689,7 @@
        in one of the families.  If it is present, then continue
        with the AccessControlFilter.
     */
     RegionCoprocessorEnvironment e = c.getEnvironment();
-    User requestUser = getActiveUser();
+    UserGroupInformation requestUser = getActiveUser();
     AuthResult authResult = permissionGranted(requestUser,
         TablePermission.Action.READ, e, get.getFamilyMap());
     if (!authResult.isAllowed()) {
@@ -809,7 +810,7 @@
        in one of the families.  If it is present, then continue
        with the AccessControlFilter.
     */
     RegionCoprocessorEnvironment e = c.getEnvironment();
-    User user = getActiveUser();
+    UserGroupInformation user = getActiveUser();
     AuthResult authResult = permissionGranted(user,
         TablePermission.Action.READ, e, scan.getFamilyMap());
     if (!authResult.isAllowed()) {
@@ -833,7 +834,7 @@
         // no table/family level perms and no qualifier level perms, reject
         logResult(authResult);
         throw new AccessDeniedException("Insufficient permissions for user '"+
-            (user != null ? user.getShortName() : "null")+"' "+
+            (user != null ? user.getShortUserName() : "null")+"' "+
            "for scanner open on table " + Bytes.toString(getTableName(e)));
       }
     } else {
@@ -846,9 +847,10 @@
   @Override
   public RegionScanner postScannerOpen(final ObserverContext c,
       final Scan scan, final RegionScanner s) throws IOException {
-    User user = getActiveUser();
-    if (user != null && user.getShortName() != null) {      // store reference to scanner owner for later checks
-      scannerOwners.put(s, user.getShortName());
+    UserGroupInformation user = getActiveUser();
+    if (user != null && user.getShortUserName() != null) {
+      // store reference to scanner owner for later checks
+      scannerOwners.put(s, user.getShortUserName());
     }
     return s;
   }
Index: security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
===================================================================
--- security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java	(revision 1326827)
+++ security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java	(working copy)
@@ -24,7 +24,7 @@

 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.FilterBase;
-import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.security.UserGroupInformation;

 /**
  * NOTE: for internal use only by AccessController implementation
@@ -45,7 +45,7 @@

   private TableAuthManager authManager;
   private byte[] table;
-  private User user;
+  private UserGroupInformation user;

   /**
    * For Writable
@@ -53,7 +53,7 @@
   AccessControlFilter() {
   }

-  AccessControlFilter(TableAuthManager mgr, User ugi,
+  AccessControlFilter(TableAuthManager mgr, UserGroupInformation ugi,
       byte[] tableName) {
     authManager = mgr;
     table = tableName;
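[Editor's note: the HBaseSaslRpcClient deletion below removes the client half of the SASL handshake from this module. The core of what it did is the standard challenge/evaluate loop; a compact, hedged sketch over plain data streams — framing is simplified relative to the deleted code, and the status words and switch-to-simple-auth escape hatch are omitted:]

```java
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import javax.security.sasl.SaslClient;

// Hypothetical, simplified restatement of the deleted saslConnect() loop:
// exchange length-prefixed tokens until the SaslClient reports completion.
class SaslHandshakeSketch {
  static void handshake(SaslClient saslClient, DataInputStream in,
      DataOutputStream out) throws IOException {
    byte[] token = new byte[0];
    if (saslClient.hasInitialResponse()) {
      token = saslClient.evaluateChallenge(token);
    }
    while (true) {
      if (token != null) {
        out.writeInt(token.length);   // length-prefixed frame, as in the
        out.write(token);             // deleted client
        out.flush();
      }
      if (saslClient.isComplete()) {
        break;                        // negotiation finished
      }
      byte[] challenge = new byte[in.readInt()];
      in.readFully(challenge);        // read the server's next challenge
      token = saslClient.evaluateChallenge(challenge);
    }
  }
}
```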
- */ - -package org.apache.hadoop.hbase.security; - -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -import javax.security.auth.callback.Callback; -import javax.security.auth.callback.CallbackHandler; -import javax.security.auth.callback.NameCallback; -import javax.security.auth.callback.PasswordCallback; -import javax.security.auth.callback.UnsupportedCallbackException; -import javax.security.sasl.RealmCallback; -import javax.security.sasl.RealmChoiceCallback; -import javax.security.sasl.Sasl; -import javax.security.sasl.SaslException; -import javax.security.sasl.SaslClient; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.io.WritableUtils; -import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus; -import org.apache.hadoop.security.SaslInputStream; -import org.apache.hadoop.security.SaslOutputStream; -import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.security.token.TokenIdentifier; - -/** - * A utility class that encapsulates SASL logic for RPC client. - * Copied from org.apache.hadoop.security - */ -public class HBaseSaslRpcClient { - public static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class); - - private final SaslClient saslClient; - - /** - * Create a HBaseSaslRpcClient for an authentication method - * - * @param method - * the requested authentication method - * @param token - * token to use if needed by the authentication method - */ - public HBaseSaslRpcClient(AuthMethod method, - Token token, String serverPrincipal) - throws IOException { - switch (method) { - case DIGEST: - if (LOG.isDebugEnabled()) - LOG.debug("Creating SASL " + AuthMethod.DIGEST.getMechanismName() - + " client to authenticate to service at " + token.getService()); - saslClient = Sasl.createSaslClient(new String[] { AuthMethod.DIGEST - .getMechanismName() }, null, null, HBaseSaslRpcServer.SASL_DEFAULT_REALM, - HBaseSaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(token)); - break; - case KERBEROS: - if (LOG.isDebugEnabled()) { - LOG - .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName() - + " client. 
Server's Kerberos principal name is " - + serverPrincipal); - } - if (serverPrincipal == null || serverPrincipal.length() == 0) { - throw new IOException( - "Failed to specify server's Kerberos principal name"); - } - String names[] = HBaseSaslRpcServer.splitKerberosName(serverPrincipal); - if (names.length != 3) { - throw new IOException( - "Kerberos principal name does NOT have the expected hostname part: " - + serverPrincipal); - } - saslClient = Sasl.createSaslClient(new String[] { AuthMethod.KERBEROS - .getMechanismName() }, null, names[0], names[1], - HBaseSaslRpcServer.SASL_PROPS, null); - break; - default: - throw new IOException("Unknown authentication method " + method); - } - if (saslClient == null) - throw new IOException("Unable to find SASL client implementation"); - } - - private static void readStatus(DataInputStream inStream) throws IOException { - int id = inStream.readInt(); // read and discard dummy id - int status = inStream.readInt(); // read status - if (status != SaslStatus.SUCCESS.state) { - throw new RemoteException(WritableUtils.readString(inStream), - WritableUtils.readString(inStream)); - } - } - - /** - * Do client side SASL authentication with server via the given InputStream - * and OutputStream - * - * @param inS - * InputStream to use - * @param outS - * OutputStream to use - * @return true if connection is set up, or false if needs to switch - * to simple Auth. - * @throws IOException - */ - public boolean saslConnect(InputStream inS, OutputStream outS) - throws IOException { - DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS)); - DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream( - outS)); - - try { - byte[] saslToken = new byte[0]; - if (saslClient.hasInitialResponse()) - saslToken = saslClient.evaluateChallenge(saslToken); - if (saslToken != null) { - outStream.writeInt(saslToken.length); - outStream.write(saslToken, 0, saslToken.length); - outStream.flush(); - if (LOG.isDebugEnabled()) - LOG.debug("Have sent token of size " + saslToken.length - + " from initSASLContext."); - } - if (!saslClient.isComplete()) { - readStatus(inStream); - int len = inStream.readInt(); - if (len == HBaseSaslRpcServer.SWITCH_TO_SIMPLE_AUTH) { - if (LOG.isDebugEnabled()) - LOG.debug("Server asks us to fall back to simple auth."); - saslClient.dispose(); - return false; - } - saslToken = new byte[len]; - if (LOG.isDebugEnabled()) - LOG.debug("Will read input token of size " + saslToken.length - + " for processing by initSASLContext"); - inStream.readFully(saslToken); - } - - while (!saslClient.isComplete()) { - saslToken = saslClient.evaluateChallenge(saslToken); - if (saslToken != null) { - if (LOG.isDebugEnabled()) - LOG.debug("Will send token of size " + saslToken.length - + " from initSASLContext."); - outStream.writeInt(saslToken.length); - outStream.write(saslToken, 0, saslToken.length); - outStream.flush(); - } - if (!saslClient.isComplete()) { - readStatus(inStream); - saslToken = new byte[inStream.readInt()]; - if (LOG.isDebugEnabled()) - LOG.debug("Will read input token of size " + saslToken.length - + " for processing by initSASLContext"); - inStream.readFully(saslToken); - } - } - if (LOG.isDebugEnabled()) { - LOG.debug("SASL client context established. 
Negotiated QoP: " - + saslClient.getNegotiatedProperty(Sasl.QOP)); - } - return true; - } catch (IOException e) { - try { - saslClient.dispose(); - } catch (SaslException ignored) { - // ignore further exceptions during cleanup - } - throw e; - } - } - - /** - * Get a SASL wrapped InputStream. Can be called only after saslConnect() has - * been called. - * - * @param in - * the InputStream to wrap - * @return a SASL wrapped InputStream - * @throws IOException - */ - public InputStream getInputStream(InputStream in) throws IOException { - if (!saslClient.isComplete()) { - throw new IOException("Sasl authentication exchange hasn't completed yet"); - } - return new SaslInputStream(in, saslClient); - } - - /** - * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has - * been called. - * - * @param out - * the OutputStream to wrap - * @return a SASL wrapped OutputStream - * @throws IOException - */ - public OutputStream getOutputStream(OutputStream out) throws IOException { - if (!saslClient.isComplete()) { - throw new IOException("Sasl authentication exchange hasn't completed yet"); - } - return new SaslOutputStream(out, saslClient); - } - - /** Release resources used by wrapped saslClient */ - public void dispose() throws SaslException { - saslClient.dispose(); - } - - private static class SaslClientCallbackHandler implements CallbackHandler { - private final String userName; - private final char[] userPassword; - - public SaslClientCallbackHandler(Token token) { - this.userName = HBaseSaslRpcServer.encodeIdentifier(token.getIdentifier()); - this.userPassword = HBaseSaslRpcServer.encodePassword(token.getPassword()); - } - - public void handle(Callback[] callbacks) - throws UnsupportedCallbackException { - NameCallback nc = null; - PasswordCallback pc = null; - RealmCallback rc = null; - for (Callback callback : callbacks) { - if (callback instanceof RealmChoiceCallback) { - continue; - } else if (callback instanceof NameCallback) { - nc = (NameCallback) callback; - } else if (callback instanceof PasswordCallback) { - pc = (PasswordCallback) callback; - } else if (callback instanceof RealmCallback) { - rc = (RealmCallback) callback; - } else { - throw new UnsupportedCallbackException(callback, - "Unrecognized SASL client callback"); - } - } - if (nc != null) { - if (LOG.isDebugEnabled()) - LOG.debug("SASL client callback: setting username: " + userName); - nc.setName(userName); - } - if (pc != null) { - if (LOG.isDebugEnabled()) - LOG.debug("SASL client callback: setting userPassword"); - pc.setPassword(userPassword); - } - if (rc != null) { - if (LOG.isDebugEnabled()) - LOG.debug("SASL client callback: setting realm: " - + rc.getDefaultText()); - rc.setText(rc.getDefaultText()); - } - } - } -} Index: security/src/main/java/org/apache/hadoop/hbase/ipc/SecureConnectionHeader.java =================================================================== --- security/src/main/java/org/apache/hadoop/hbase/ipc/SecureConnectionHeader.java (revision 1326827) +++ security/src/main/java/org/apache/hadoop/hbase/ipc/SecureConnectionHeader.java (working copy) @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.ipc; - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; - -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; -import org.apache.hadoop.io.Text; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.security.UserGroupInformation; - -/** - * The IPC connection header sent by the client to the server - * on connection establishment. Part of the {@link SecureRpcEngine} - * implementation. - */ -class SecureConnectionHeader extends ConnectionHeader { - private User user = null; - private AuthMethod authMethod; - - public SecureConnectionHeader() {} - - /** - * Create a new {@link org.apache.hadoop.hbase.ipc.SecureConnectionHeader} with the given protocol - * and {@link org.apache.hadoop.security.UserGroupInformation}. - * @param protocol protocol used for communication between the IPC client - * and the server - * @param ugi {@link org.apache.hadoop.security.UserGroupInformation} of the client communicating with - * the server - */ - public SecureConnectionHeader(String protocol, User user, AuthMethod authMethod) { - this.protocol = protocol; - this.user = user; - this.authMethod = authMethod; - } - - @Override - public void readFields(DataInput in) throws IOException { - protocol = Text.readString(in); - if (protocol.isEmpty()) { - protocol = null; - } - boolean ugiUsernamePresent = in.readBoolean(); - if (ugiUsernamePresent) { - String username = in.readUTF(); - boolean realUserNamePresent = in.readBoolean(); - if (realUserNamePresent) { - String realUserName = in.readUTF(); - UserGroupInformation realUserUgi = - UserGroupInformation.createRemoteUser(realUserName); - user = User.create( - UserGroupInformation.createProxyUser(username, realUserUgi)); - } else { - user = User.create(UserGroupInformation.createRemoteUser(username)); - } - } else { - user = null; - } - } - - @Override - public void write(DataOutput out) throws IOException { - Text.writeString(out, (protocol == null) ? 
"" : protocol); - if (user != null) { - UserGroupInformation ugi = user.getUGI(); - if (authMethod == AuthMethod.KERBEROS) { - // Send effective user for Kerberos auth - out.writeBoolean(true); - out.writeUTF(ugi.getUserName()); - out.writeBoolean(false); - } else if (authMethod == AuthMethod.DIGEST) { - // Don't send user for token auth - out.writeBoolean(false); - } else { - //Send both effective user and real user for simple auth - out.writeBoolean(true); - out.writeUTF(ugi.getUserName()); - if (ugi.getRealUser() != null) { - out.writeBoolean(true); - out.writeUTF(ugi.getRealUser().getUserName()); - } else { - out.writeBoolean(false); - } - } - } else { - out.writeBoolean(false); - } - } - - public String getProtocol() { - return protocol; - } - - public User getUser() { - return user; - } - - public String toString() { - return protocol + "-" + user; - } -} Index: security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java =================================================================== --- security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java (revision 1326827) +++ security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java (working copy) @@ -1,413 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.ipc; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configurable; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.Server; -import org.apache.hadoop.hbase.client.RetriesExhaustedException; -import org.apache.hadoop.hbase.io.HbaseObjectWritable; -import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; -import org.apache.hadoop.hbase.security.HBasePolicyProvider; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager; -import org.apache.hadoop.hbase.util.Objects; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate; -import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; - -import javax.net.SocketFactory; -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; -import java.lang.reflect.*; -import java.net.ConnectException; -import java.net.InetSocketAddress; -import java.net.SocketTimeoutException; -import java.util.HashMap; -import java.util.Map; - -/** - * A loadable RPC engine supporting SASL authentication of connections, using - * GSSAPI for Kerberos authentication or DIGEST-MD5 for authentication via - * signed tokens. - * - *

- * This is a fork of the {@code org.apache.hadoop.ipc.WriteableRpcEngine} from - * secure Hadoop, reworked to eliminate code duplication with the existing - * HBase {@link WritableRpcEngine}. - *

- *
- * @see SecureClient
- * @see SecureServer
- */
-public class SecureRpcEngine implements RpcEngine {
-  // Leave this out in the hadoop ipc package but keep class name.  Do this
-  // so that we don't get the logging of this class's invocations by doing our
-  // blanket enabling DEBUG on the o.a.h.h. package.
-  protected static final Log LOG =
-    LogFactory.getLog("org.apache.hadoop.ipc.SecureRpcEngine");
-
-  private SecureRpcEngine() {
-    super();
-  } // no public ctor
-
-  /* Cache a client using its socket factory as the hash key */
-  static private class ClientCache {
-    private Map clients =
-      new HashMap();
-
-    protected ClientCache() {}
-
-    /**
-     * Construct & cache an IPC client with the user-provided SocketFactory
-     * if no cached client exists.
-     *
-     * @param conf Configuration
-     * @param factory socket factory
-     * @return an IPC client
-     */
-    protected synchronized SecureClient getClient(Configuration conf,
-        SocketFactory factory) {
-      // Construct & cache client.  The configuration is only used for timeout,
-      // and Clients have connection pools.  So we can either (a) lose some
-      // connection pooling and leak sockets, or (b) use the same timeout for all
-      // configurations.  Since the IPC is usually intended globally, not
-      // per-job, we choose (a).
-      SecureClient client = clients.get(factory);
-      if (client == null) {
-        // Make an hbase client instead of hadoop Client.
-        client = new SecureClient(HbaseObjectWritable.class, conf, factory);
-        clients.put(factory, client);
-      } else {
-        client.incCount();
-      }
-      return client;
-    }
-
-    /**
-     * Construct & cache an IPC client with the default SocketFactory
-     * if no cached client exists.
-     *
-     * @param conf Configuration
-     * @return an IPC client
-     */
-    protected synchronized SecureClient getClient(Configuration conf) {
-      return getClient(conf, SocketFactory.getDefault());
-    }
-
-    /**
-     * Stop an RPC client connection.
-     * An RPC client is closed only when its reference count becomes zero.
- * @param client client to stop - */ - protected void stopClient(SecureClient client) { - synchronized (this) { - client.decCount(); - if (client.isZeroReference()) { - clients.remove(client.getSocketFactory()); - } - } - if (client.isZeroReference()) { - client.stop(); - } - } - } - - protected final static ClientCache CLIENTS = new ClientCache(); - - private static class Invoker implements InvocationHandler { - private Class protocol; - private InetSocketAddress address; - private User ticket; - private SecureClient client; - private boolean isClosed = false; - final private int rpcTimeout; - - public Invoker(Class protocol, - InetSocketAddress address, User ticket, - Configuration conf, SocketFactory factory, int rpcTimeout) { - this.protocol = protocol; - this.address = address; - this.ticket = ticket; - this.client = CLIENTS.getClient(conf, factory); - this.rpcTimeout = rpcTimeout; - } - - public Object invoke(Object proxy, Method method, Object[] args) - throws Throwable { - final boolean logDebug = LOG.isDebugEnabled(); - long startTime = 0; - if (logDebug) { - startTime = System.currentTimeMillis(); - } - HbaseObjectWritable value = (HbaseObjectWritable) - client.call(new Invocation(method, args), address, - protocol, ticket, rpcTimeout); - if (logDebug) { - long callTime = System.currentTimeMillis() - startTime; - LOG.debug("Call: " + method.getName() + " " + callTime); - } - return value.get(); - } - - /* close the IPC client that's responsible for this invoker's RPCs */ - synchronized protected void close() { - if (!isClosed) { - isClosed = true; - CLIENTS.stopClient(client); - } - } - } - - /** - * Construct a client-side proxy object that implements the named protocol, - * talking to a server at the named address. - * - * @param protocol interface - * @param clientVersion version we are expecting - * @param addr remote address - * @param ticket ticket - * @param conf configuration - * @param factory socket factory - * @return proxy - * @throws java.io.IOException e - */ - public VersionedProtocol getProxy( - Class protocol, long clientVersion, - InetSocketAddress addr, User ticket, - Configuration conf, SocketFactory factory, int rpcTimeout) - throws IOException { - if (User.isSecurityEnabled()) { - HBaseSaslRpcServer.init(conf); - } - VersionedProtocol proxy = - (VersionedProtocol) Proxy.newProxyInstance( - protocol.getClassLoader(), new Class[] { protocol }, - new Invoker(protocol, addr, ticket, conf, factory, rpcTimeout)); - long serverVersion = proxy.getProtocolVersion(protocol.getName(), - clientVersion); - if (serverVersion != clientVersion) { - throw new HBaseRPC.VersionMismatch(protocol.getName(), clientVersion, - serverVersion); - } - return proxy; - } - - /** - * Stop this proxy and release its invoker's resource - * @param proxy the proxy to be stopped - */ - public void stopProxy(VersionedProtocol proxy) { - if (proxy!=null) { - ((Invoker)Proxy.getInvocationHandler(proxy)).close(); - } - } - - - /** Expert: Make multiple, parallel calls to a set of servers. 
*/ - public Object[] call(Method method, Object[][] params, - InetSocketAddress[] addrs, - Class protocol, - User ticket, Configuration conf) - throws IOException, InterruptedException { - - Invocation[] invocations = new Invocation[params.length]; - for (int i = 0; i < params.length; i++) - invocations[i] = new Invocation(method, params[i]); - SecureClient client = CLIENTS.getClient(conf); - try { - Writable[] wrappedValues = - client.call(invocations, addrs, protocol, ticket); - - if (method.getReturnType() == Void.TYPE) { - return null; - } - - Object[] values = - (Object[])Array.newInstance(method.getReturnType(), wrappedValues.length); - for (int i = 0; i < values.length; i++) - if (wrappedValues[i] != null) - values[i] = ((HbaseObjectWritable)wrappedValues[i]).get(); - - return values; - } finally { - CLIENTS.stopClient(client); - } - } - - /** Construct a server for a protocol implementation instance listening on a - * port and address, with a secret manager. */ - public Server getServer(Class protocol, - final Object instance, - Class[] ifaces, - final String bindAddress, final int port, - final int numHandlers, - int metaHandlerCount, final boolean verbose, Configuration conf, - int highPriorityLevel) - throws IOException { - Server server = new Server(instance, ifaces, conf, bindAddress, port, - numHandlers, metaHandlerCount, verbose, - highPriorityLevel); - return server; - } - - /** An RPC Server. */ - public static class Server extends SecureServer { - private Object instance; - private Class implementation; - private Class[] ifaces; - private boolean verbose; - - private static String classNameBase(String className) { - String[] names = className.split("\\.", -1); - if (names == null || names.length == 0) { - return className; - } - return names[names.length-1]; - } - - /** Construct an RPC server. - * @param instance the instance whose methods will be called - * @param conf the configuration to use - * @param bindAddress the address to bind on to listen for connection - * @param port the port to listen for connections on - * @param numHandlers the number of method handler threads to run - * @param verbose whether each call should be logged - * @throws java.io.IOException e - */ - public Server(Object instance, final Class[] ifaces, - Configuration conf, String bindAddress, int port, - int numHandlers, int metaHandlerCount, boolean verbose, - int highPriorityLevel) - throws IOException { - super(bindAddress, port, Invocation.class, numHandlers, metaHandlerCount, conf, - classNameBase(instance.getClass().getName()), highPriorityLevel); - this.instance = instance; - this.implementation = instance.getClass(); - this.verbose = verbose; - - this.ifaces = ifaces; - - // create metrics for the advertised interfaces this server implements. 
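Server.call(), further down in this class, dispatches each deserialized Invocation reflectively against the protocol interface. Distilled, the pattern looks like this (class and method names are illustrative, not part of the patch):

import java.lang.reflect.Method;

public class ReflectiveDispatchSketch {
  // Resolve the requested method on the protocol interface and invoke it
  // on the implementing instance, as SecureRpcEngine.Server.call() does.
  public static Object dispatch(Class<?> protocol, Object instance,
      String methodName, Class<?>[] parameterTypes, Object[] args)
      throws Exception {
    if (!protocol.isAssignableFrom(instance.getClass())) {
      throw new IllegalArgumentException("instance does not implement "
          + protocol.getName());
    }
    Method method = protocol.getMethod(methodName, parameterTypes);
    method.setAccessible(true);
    return method.invoke(instance, args);
  }
}

Failing fast when the instance does not implement the protocol mirrors the UnknownProtocolException check in the removed code.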
- this.rpcMetrics.createMetrics(this.ifaces); - } - - public AuthenticationTokenSecretManager createSecretManager(){ - if (instance instanceof org.apache.hadoop.hbase.Server) { - org.apache.hadoop.hbase.Server server = - (org.apache.hadoop.hbase.Server)instance; - Configuration conf = server.getConfiguration(); - long keyUpdateInterval = - conf.getLong("hbase.auth.key.update.interval", 24*60*60*1000); - long maxAge = - conf.getLong("hbase.auth.token.max.lifetime", 7*24*60*60*1000); - return new AuthenticationTokenSecretManager(conf, server.getZooKeeper(), - server.getServerName().toString(), keyUpdateInterval, maxAge); - } - return null; - } - - @Override - public void startThreads() { - AuthenticationTokenSecretManager mgr = createSecretManager(); - if (mgr != null) { - setSecretManager(mgr); - mgr.start(); - } - this.authManager = new ServiceAuthorizationManager(); - HBasePolicyProvider.init(conf, authManager); - - // continue with base startup - super.startThreads(); - } - - @Override - public Writable call(Class protocol, - Writable param, long receivedTime, MonitoredRPCHandler status) - throws IOException { - try { - Invocation call = (Invocation)param; - if(call.getMethodName() == null) { - throw new IOException("Could not find requested method, the usual " + - "cause is a version mismatch between client and server."); - } - if (verbose) log("Call: " + call); - - Method method = - protocol.getMethod(call.getMethodName(), - call.getParameterClasses()); - method.setAccessible(true); - - Object impl = null; - if (protocol.isAssignableFrom(this.implementation)) { - impl = this.instance; - } - else { - throw new HBaseRPC.UnknownProtocolException(protocol); - } - - long startTime = System.currentTimeMillis(); - Object[] params = call.getParameters(); - Object value = method.invoke(impl, params); - int processingTime = (int) (System.currentTimeMillis() - startTime); - int qTime = (int) (startTime-receivedTime); - if (TRACELOG.isDebugEnabled()) { - TRACELOG.debug("Call #" + CurCall.get().id + - "; Served: " + protocol.getSimpleName()+"#"+call.getMethodName() + - " queueTime=" + qTime + - " processingTime=" + processingTime + - " contents=" + Objects.describeQuantity(params)); - } - rpcMetrics.rpcQueueTime.inc(qTime); - rpcMetrics.rpcProcessingTime.inc(processingTime); - rpcMetrics.inc(call.getMethodName(), processingTime); - if (verbose) log("Return: "+value); - - return new HbaseObjectWritable(method.getReturnType(), value); - } catch (InvocationTargetException e) { - Throwable target = e.getTargetException(); - if (target instanceof IOException) { - throw (IOException)target; - } - IOException ioe = new IOException(target.toString()); - ioe.setStackTrace(target.getStackTrace()); - throw ioe; - } catch (Throwable e) { - if (!(e instanceof IOException)) { - LOG.error("Unexpected throwable object ", e); - } - IOException ioe = new IOException(e.toString()); - ioe.setStackTrace(e.getStackTrace()); - throw ioe; - } - } - } - - protected static void log(String value) { - String v = value; - if (v != null && v.length() > 55) - v = v.substring(0, 55)+"..."; - LOG.info(v); - } -} \ No newline at end of file Index: security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java =================================================================== --- security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java (revision 1326827) +++ security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java (working copy) @@ -1,522 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) 
under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.ipc; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; -import org.apache.hadoop.hbase.security.KerberosInfo; -import org.apache.hadoop.hbase.security.TokenInfo; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier; -import org.apache.hadoop.hbase.security.token.AuthenticationTokenSelector; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.PoolMap; -import org.apache.hadoop.io.*; -import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.security.SecurityUtil; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.security.token.TokenSelector; -import org.apache.hadoop.util.ReflectionUtils; - -import javax.net.SocketFactory; -import java.io.*; -import java.net.*; -import java.security.PrivilegedExceptionAction; -import java.util.HashMap; -import java.util.Hashtable; -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Random; -import java.util.concurrent.ConcurrentSkipListMap; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; - -/** - * A client for an IPC service, which support SASL authentication of connections - * using either GSSAPI for Kerberos authentication or DIGEST-MD5 for - * authentication using signed tokens. - * - *

- * This is a copy of org.apache.hadoop.ipc.Client from secure Hadoop, - * reworked to remove code duplicated with - * {@link org.apache.hadoop.hbase.HBaseClient}. This is part of the loadable - * {@link SecureRpcEngine}, and only functions in connection with a - * {@link SecureServer} instance. - *

- */ -public class SecureClient extends HBaseClient { - - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.ipc.SecureClient"); - - protected static Map> tokenHandlers = - new HashMap>(); - static { - tokenHandlers.put(AuthenticationTokenIdentifier.AUTH_TOKEN_TYPE.toString(), - new AuthenticationTokenSelector()); - } - - /** Thread that reads responses and notifies callers. Each connection owns a - * socket connected to a remote address. Calls are multiplexed through this - * socket: responses may be delivered out of order. */ - protected class SecureConnection extends Connection { - private InetSocketAddress server; // server ip:port - private String serverPrincipal; // server's krb5 principal name - private SecureConnectionHeader header; // connection header - private AuthMethod authMethod; // authentication method - private boolean useSasl; - private Token token; - private HBaseSaslRpcClient saslRpcClient; - private int reloginMaxBackoff; // max pause before relogin on sasl failure - - public SecureConnection(ConnectionId remoteId) throws IOException { - super(remoteId); - this.server = remoteId.getAddress(); - - User ticket = remoteId.getTicket(); - Class protocol = remoteId.getProtocol(); - this.useSasl = User.isSecurityEnabled(); - if (useSasl && protocol != null) { - TokenInfo tokenInfo = protocol.getAnnotation(TokenInfo.class); - if (tokenInfo != null) { - TokenSelector tokenSelector = - tokenHandlers.get(tokenInfo.value()); - if (tokenSelector != null) { - token = tokenSelector.selectToken(new Text(clusterId), - ticket.getUGI().getTokens()); - } else if (LOG.isDebugEnabled()) { - LOG.debug("No token selector found for type "+tokenInfo.value()); - } - } - KerberosInfo krbInfo = protocol.getAnnotation(KerberosInfo.class); - if (krbInfo != null) { - String serverKey = krbInfo.serverPrincipal(); - if (serverKey == null) { - throw new IOException( - "Can't obtain server Kerberos config key from KerberosInfo"); - } - serverPrincipal = SecurityUtil.getServerPrincipal( - conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase()); - if (LOG.isDebugEnabled()) { - LOG.debug("RPC Server Kerberos principal name for protocol=" - + protocol.getCanonicalName() + " is " + serverPrincipal); - } - } - } - - if (!useSasl) { - authMethod = AuthMethod.SIMPLE; - } else if (token != null) { - authMethod = AuthMethod.DIGEST; - } else { - authMethod = AuthMethod.KERBEROS; - } - - header = new SecureConnectionHeader( - protocol == null ? null : protocol.getName(), ticket, authMethod); - - if (LOG.isDebugEnabled()) - LOG.debug("Use " + authMethod + " authentication for protocol " - + protocol.getSimpleName()); - - reloginMaxBackoff = conf.getInt("hbase.security.relogin.maxbackoff", 5000); - } - - private synchronized void disposeSasl() { - if (saslRpcClient != null) { - try { - saslRpcClient.dispose(); - saslRpcClient = null; - } catch (IOException ioe) { - LOG.info("Error disposing of SASL client", ioe); - } - } - } - - @Override - protected void sendParam(Call call) { - if (shouldCloseConnection.get()) { - return; - } - // For serializing the data to be written. 
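The body that follows writes a 0xdeadbeef placeholder, serializes the call, and then patches the real payload length into the first four bytes. The same framing as a self-contained sketch, using plain java.io and java.nio (the class name is hypothetical):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class LengthPrefixSketch {
  // Frame a call: 4-byte placeholder, call id, payload; then patch the
  // actual length (excluding the prefix itself) back into the first slot.
  public static byte[] frame(int callId, byte[] payload) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    out.writeInt(0xdeadbeef);   // placeholder, length unknown until written
    out.writeInt(callId);
    out.write(payload);
    out.flush();
    byte[] data = buf.toByteArray();
    ByteBuffer.wrap(data).putInt(0, data.length - 4);  // fill the placeholder
    return data;
  }
}

The length excludes the four-byte prefix itself, which is why dataLength - 4 is what gets patched in.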
- - final DataOutputBuffer d = new DataOutputBuffer(); - try { - if (LOG.isDebugEnabled()) { - LOG.debug(getName() + " sending #" + call.id); - } - d.writeInt(0xdeadbeef); // placeholder for data length - d.writeInt(call.id); - call.param.write(d); - byte[] data = d.getData(); - int dataLength = d.getLength(); - // fill in the placeholder - Bytes.putInt(data, 0, dataLength - 4); - //noinspection SynchronizeOnNonFinalField - synchronized (this.out) { // FindBugs IS2_INCONSISTENT_SYNC - out.write(data, 0, dataLength); - out.flush(); - } - } catch(IOException e) { - markClosed(e); - } finally { - //the buffer is just an in-memory buffer, but it is still polite to - // close early - IOUtils.closeStream(d); - } - } - - private synchronized boolean shouldAuthenticateOverKrb() throws IOException { - UserGroupInformation loginUser = UserGroupInformation.getLoginUser(); - UserGroupInformation currentUser = - UserGroupInformation.getCurrentUser(); - UserGroupInformation realUser = currentUser.getRealUser(); - return authMethod == AuthMethod.KERBEROS && - loginUser != null && - //Make sure user logged in using Kerberos either keytab or TGT - loginUser.hasKerberosCredentials() && - // relogin only in case it is the login user (e.g. JT) - // or superuser (like oozie). - (loginUser.equals(currentUser) || loginUser.equals(realUser)); - } - - private synchronized boolean setupSaslConnection(final InputStream in2, - final OutputStream out2) - throws IOException { - saslRpcClient = new HBaseSaslRpcClient(authMethod, token, serverPrincipal); - return saslRpcClient.saslConnect(in2, out2); - } - - /** - * If multiple clients with the same principal try to connect - * to the same server at the same time, the server assumes a - * replay attack is in progress. This is a feature of kerberos. - * In order to work around this, what is done is that the client - * backs off randomly and tries to initiate the connection - * again. - * The other problem is to do with ticket expiry. To handle that, - * a relogin is attempted. 
- */ - private synchronized void handleSaslConnectionFailure( - final int currRetries, - final int maxRetries, final Exception ex, final Random rand, - final User user) - throws IOException, InterruptedException{ - user.runAs(new PrivilegedExceptionAction() { - public Object run() throws IOException, InterruptedException { - closeConnection(); - if (shouldAuthenticateOverKrb()) { - if (currRetries < maxRetries) { - LOG.debug("Exception encountered while connecting to " + - "the server : " + ex); - //try re-login - if (UserGroupInformation.isLoginKeytabBased()) { - UserGroupInformation.getLoginUser().reloginFromKeytab(); - } else { - UserGroupInformation.getLoginUser().reloginFromTicketCache(); - } - disposeSasl(); - //have granularity of milliseconds - //we are sleeping with the Connection lock held but since this - //connection instance is being used for connecting to the server - //in question, it is okay - Thread.sleep((rand.nextInt(reloginMaxBackoff) + 1)); - return null; - } else { - String msg = "Couldn't setup connection for " + - UserGroupInformation.getLoginUser().getUserName() + - " to " + serverPrincipal; - LOG.warn(msg); - throw (IOException) new IOException(msg).initCause(ex); - } - } else { - LOG.warn("Exception encountered while connecting to " + - "the server : " + ex); - } - if (ex instanceof RemoteException) - throw (RemoteException)ex; - throw new IOException(ex); - } - }); - } - - @Override - protected synchronized void setupIOstreams() - throws IOException, InterruptedException { - if (socket != null || shouldCloseConnection.get()) { - return; - } - - try { - if (LOG.isDebugEnabled()) { - LOG.debug("Connecting to "+server); - } - short numRetries = 0; - final short MAX_RETRIES = 5; - Random rand = null; - while (true) { - setupConnection(); - InputStream inStream = NetUtils.getInputStream(socket); - OutputStream outStream = NetUtils.getOutputStream(socket); - writeRpcHeader(outStream); - if (useSasl) { - final InputStream in2 = inStream; - final OutputStream out2 = outStream; - User ticket = remoteId.getTicket(); - if (authMethod == AuthMethod.KERBEROS) { - UserGroupInformation ugi = ticket.getUGI(); - if (ugi != null && ugi.getRealUser() != null) { - ticket = User.create(ugi.getRealUser()); - } - } - boolean continueSasl = false; - try { - continueSasl = - ticket.runAs(new PrivilegedExceptionAction() { - @Override - public Boolean run() throws IOException { - return setupSaslConnection(in2, out2); - } - }); - } catch (Exception ex) { - if (rand == null) { - rand = new Random(); - } - handleSaslConnectionFailure(numRetries++, MAX_RETRIES, ex, rand, - ticket); - continue; - } - if (continueSasl) { - // Sasl connect is successful. Let's set up Sasl i/o streams. - inStream = saslRpcClient.getInputStream(inStream); - outStream = saslRpcClient.getOutputStream(outStream); - } else { - // fall back to simple auth because server told us so. 
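The handleSaslConnectionFailure() method above works around Kerberos replay detection by re-logging in and sleeping a bounded random interval before each retry. Reduced to its core decision, and assuming the caller supplies the retry counters (names are hypothetical):

import java.io.IOException;
import java.util.Random;

public class SaslRetryBackoffSketch {
  // Retry with a bounded random pause while attempts remain; otherwise
  // surface the original failure, as handleSaslConnectionFailure() does.
  public static void backoffOrFail(int currRetries, int maxRetries,
      Random rand, int reloginMaxBackoffMs, IOException cause)
      throws IOException, InterruptedException {
    if (currRetries < maxRetries) {
      // randomized sleep spreads out clients that share a principal
      Thread.sleep(rand.nextInt(reloginMaxBackoffMs) + 1);
    } else {
      throw new IOException("Couldn't set up SASL connection after "
          + maxRetries + " attempts", cause);
    }
  }
}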
- authMethod = AuthMethod.SIMPLE; - header = new SecureConnectionHeader(header.getProtocol(), - header.getUser(), authMethod); - useSasl = false; - } - } - this.in = new DataInputStream(new BufferedInputStream - (new PingInputStream(inStream))); - this.out = new DataOutputStream - (new BufferedOutputStream(outStream)); - writeHeader(); - - // update last activity time - touch(); - - // start the receiver thread after the socket connection has been set up - start(); - return; - } - } catch (IOException e) { - markClosed(e); - close(); - - throw e; - } - } - - /* Write the RPC header */ - private void writeRpcHeader(OutputStream outStream) throws IOException { - DataOutputStream out = new DataOutputStream(new BufferedOutputStream(outStream)); - // Write out the header, version and authentication method - out.write(SecureServer.HEADER.array()); - out.write(SecureServer.CURRENT_VERSION); - authMethod.write(out); - out.flush(); - } - - /** - * Write the protocol header for each connection - * Out is not synchronized because only the first thread does this. - */ - private void writeHeader() throws IOException { - // Write out the ConnectionHeader - DataOutputBuffer buf = new DataOutputBuffer(); - header.write(buf); - - // Write out the payload length - int bufLen = buf.getLength(); - out.writeInt(bufLen); - out.write(buf.getData(), 0, bufLen); - } - - @Override - protected void receiveResponse() { - if (shouldCloseConnection.get()) { - return; - } - touch(); - - try { - int id = in.readInt(); // try to read an id - - if (LOG.isDebugEnabled()) - LOG.debug(getName() + " got value #" + id); - - Call call = calls.remove(id); - - int state = in.readInt(); // read call status - if (LOG.isDebugEnabled()) { - LOG.debug("call #"+id+" state is " + state); - } - if (state == Status.SUCCESS.state) { - Writable value = ReflectionUtils.newInstance(valueClass, conf); - value.readFields(in); // read value - if (LOG.isDebugEnabled()) { - LOG.debug("call #"+id+", response is:\n"+value.toString()); - } - call.setValue(value); - } else if (state == Status.ERROR.state) { - call.setException(new RemoteException(WritableUtils.readString(in), - WritableUtils.readString(in))); - } else if (state == Status.FATAL.state) { - // Close the connection - markClosed(new RemoteException(WritableUtils.readString(in), - WritableUtils.readString(in))); - } - } catch (IOException e) { - if (e instanceof SocketTimeoutException && remoteId.rpcTimeout > 0) { - // Clean up open calls but don't treat this as a fatal condition, - // since we expect certain responses to not make it by the specified - // {@link ConnectionId#rpcTimeout}. - closeException = e; - } else { - // Since the server did not respond within the default ping interval - // time, treat this as a fatal condition and close this connection - markClosed(e); - } - } finally { - if (remoteId.rpcTimeout > 0) { - cleanupCalls(remoteId.rpcTimeout); - } - } - } - - /** Close the connection. 
*/ - protected synchronized void close() { - if (!shouldCloseConnection.get()) { - LOG.error("The connection is not in the closed state"); - return; - } - - // release the resources - // first thing to do;take the connection out of the connection list - synchronized (connections) { - if (connections.get(remoteId) == this) { - connections.remove(remoteId); - } - } - - // close the streams and therefore the socket - IOUtils.closeStream(out); - IOUtils.closeStream(in); - disposeSasl(); - - // clean up all calls - if (closeException == null) { - if (!calls.isEmpty()) { - LOG.warn( - "A connection is closed for no cause and calls are not empty"); - - // clean up calls anyway - closeException = new IOException("Unexpected closed connection"); - cleanupCalls(); - } - } else { - // log the info - if (LOG.isDebugEnabled()) { - LOG.debug("closing ipc connection to " + server + ": " + - closeException.getMessage(),closeException); - } - - // cleanup calls - cleanupCalls(); - } - if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": closed"); - } - } - - /** - * Construct an IPC client whose values are of the given {@link org.apache.hadoop.io.Writable} - * class. - * @param valueClass value class - * @param conf configuration - * @param factory socket factory - */ - public SecureClient(Class valueClass, Configuration conf, - SocketFactory factory) { - super(valueClass, conf, factory); - } - - /** - * Construct an IPC client with the default SocketFactory - * @param valueClass value class - * @param conf configuration - */ - public SecureClient(Class valueClass, Configuration conf) { - this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf)); - } - - @Override - protected SecureConnection getConnection(InetSocketAddress addr, - Class protocol, - User ticket, - int rpcTimeout, - Call call) - throws IOException, InterruptedException { - if (!running.get()) { - // the client is stopped - throw new IOException("The client is stopped"); - } - SecureConnection connection; - /* we could avoid this allocation for each RPC by having a - * connectionsId object and with set() method. We need to manage the - * refs for keys in HashMap properly. For now its ok. - */ - ConnectionId remoteId = new ConnectionId(addr, protocol, ticket, rpcTimeout); - do { - synchronized (connections) { - connection = (SecureConnection)connections.get(remoteId); - if (connection == null) { - connection = new SecureConnection(remoteId); - connections.put(remoteId, connection); - } - } - } while (!connection.addCall(call)); - - //we don't invoke the method below inside "synchronized (connections)" - //block above. The reason for that is if the server happens to be slow, - //it will take longer to establish a connection and that will slow the - //entire system down. - connection.setupIOstreams(); - return connection; - } -} \ No newline at end of file Index: security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java =================================================================== --- security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java (revision 1326827) +++ security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java (working copy) @@ -1,739 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hbase.ipc; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.io.HbaseObjectWritable; -import org.apache.hadoop.hbase.io.WritableWithSize; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslDigestCallbackHandler; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandler; -import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus; -import org.apache.hadoop.hbase.security.User; -import org.apache.hadoop.hbase.util.ByteBufferOutputStream; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.io.BytesWritable; -import org.apache.hadoop.io.IntWritable; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.io.WritableUtils; -import org.apache.hadoop.security.AccessControlException; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; -import org.apache.hadoop.security.authorize.AuthorizationException; -import org.apache.hadoop.security.authorize.ProxyUsers; -import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; -import org.apache.hadoop.security.token.SecretManager; -import org.apache.hadoop.security.token.SecretManager.InvalidToken; -import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.util.ReflectionUtils; -import org.apache.hadoop.util.StringUtils; - -import com.google.common.collect.ImmutableSet; - -import javax.security.sasl.Sasl; -import javax.security.sasl.SaslException; -import javax.security.sasl.SaslServer; -import java.io.*; -import java.net.*; -import java.nio.ByteBuffer; -import java.nio.channels.*; -import java.security.PrivilegedExceptionAction; -import java.util.*; - -import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION; - -/** - * An abstract IPC service, supporting SASL authentication of connections, - * using GSSAPI for Kerberos authentication or DIGEST-MD5 for authentication - * via signed tokens. - * - *

- * This is part of the {@link SecureRpcEngine} implementation. - *

- * - * @see org.apache.hadoop.hbase.ipc.SecureClient - */ -public abstract class SecureServer extends HBaseServer { - private final boolean authorize; - private boolean isSecurityEnabled; - - /** - * The first four bytes of secure RPC connections - */ - public static final ByteBuffer HEADER = ByteBuffer.wrap("srpc".getBytes()); - - // 1 : Introduce ping and server does not throw away RPCs - // 3 : Introduce the protocol into the RPC connection header - // 4 : Introduced SASL security layer - public static final byte CURRENT_VERSION = 4; - public static final Set INSECURE_VERSIONS = ImmutableSet.of((byte) 3); - - public static final Log LOG = LogFactory.getLog("org.apache.hadoop.ipc.SecureServer"); - private static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger.org.apache.hadoop.ipc.SecureServer"); - private static final String AUTH_FAILED_FOR = "Auth failed for "; - private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; - - protected SecretManager secretManager; - protected ServiceAuthorizationManager authManager; - - protected class SecureCall extends HBaseServer.Call { - public SecureCall(int id, Writable param, Connection connection, - Responder responder, long size) { - super(id, param, connection, responder, size); - } - - @Override - protected synchronized void setResponse(Object value, Status status, - String errorClass, String error) { - Writable result = null; - if (value instanceof Writable) { - result = (Writable) value; - } else { - /* We might have a null value and errors. Avoid creating a - * HbaseObjectWritable, because the constructor fails on null. */ - if (value != null) { - result = new HbaseObjectWritable(value); - } - } - - int size = BUFFER_INITIAL_SIZE; - if (result instanceof WritableWithSize) { - // get the size hint. - WritableWithSize ohint = (WritableWithSize) result; - long hint = ohint.getWritableSize() + Bytes.SIZEOF_INT + Bytes.SIZEOF_INT; - if (hint > Integer.MAX_VALUE) { - // oops, new problem. - IOException ioe = - new IOException("Result buffer size too large: " + hint); - errorClass = ioe.getClass().getName(); - error = StringUtils.stringifyException(ioe); - } else { - size = (int)hint; - } - } - - ByteBufferOutputStream buf = new ByteBufferOutputStream(size); - DataOutputStream out = new DataOutputStream(buf); - try { - out.writeInt(this.id); // write call id - out.writeInt(status.state); // write status - } catch (IOException e) { - errorClass = e.getClass().getName(); - error = StringUtils.stringifyException(e); - } - - try { - if (status == Status.SUCCESS) { - result.write(out); - } else { - WritableUtils.writeString(out, errorClass); - WritableUtils.writeString(out, error); - } - if (((SecureConnection)connection).useWrap) { - wrapWithSasl(buf); - } - } catch (IOException e) { - LOG.warn("Error sending response to call: ", e); - } - - this.response = buf.getByteBuffer(); - } - - private void wrapWithSasl(ByteBufferOutputStream response) - throws IOException { - if (((SecureConnection)connection).useSasl) { - // getByteBuffer calls flip() - ByteBuffer buf = response.getByteBuffer(); - byte[] token; - // synchronization may be needed since there can be multiple Handler - // threads using saslServer to wrap responses. 
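Distilled, the wrap step that follows amounts to wrapping the plaintext response and re-framing the resulting token behind a four-byte length; a sketch against the standard javax.security.sasl API (class name hypothetical):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import javax.security.sasl.SaslServer;

public class SaslWrapSketch {
  // Wrap a plaintext response in the negotiated SASL security layer and
  // length-prefix the token. The SaslServer is shared by multiple handler
  // threads, hence the synchronization.
  public static byte[] wrapResponse(SaslServer saslServer, byte[] plain)
      throws IOException {
    byte[] token;
    synchronized (saslServer) {
      token = saslServer.wrap(plain, 0, plain.length);
    }
    ByteArrayOutputStream framed = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(framed);
    out.writeInt(token.length);
    out.write(token, 0, token.length);
    return framed.toByteArray();
  }
}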
- synchronized (((SecureConnection)connection).saslServer) { - token = ((SecureConnection)connection).saslServer.wrap(buf.array(), - buf.arrayOffset(), buf.remaining()); - } - if (LOG.isDebugEnabled()) - LOG.debug("Adding saslServer wrapped token of size " + token.length - + " as call response."); - buf.clear(); - DataOutputStream saslOut = new DataOutputStream(response); - saslOut.writeInt(token.length); - saslOut.write(token, 0, token.length); - } - } - } - - /** Reads calls from a connection and queues them for handling. */ - public class SecureConnection extends HBaseServer.Connection { - private boolean rpcHeaderRead = false; // if initial rpc header is read - private boolean headerRead = false; //if the connection header that - //follows version is read. - private ByteBuffer data; - private ByteBuffer dataLengthBuffer; - protected final LinkedList responseQueue; - private int dataLength; - private InetAddress addr; - - boolean useSasl; - SaslServer saslServer; - private AuthMethod authMethod; - private boolean saslContextEstablished; - private boolean skipInitialSaslHandshake; - private ByteBuffer rpcHeaderBuffer; - private ByteBuffer unwrappedData; - private ByteBuffer unwrappedDataLengthBuffer; - private SecureConnectionHeader header; - - public UserGroupInformation attemptingUser = null; // user name before auth - - // Fake 'call' for failed authorization response - private final int AUTHORIZATION_FAILED_CALLID = -1; - // Fake 'call' for SASL context setup - private static final int SASL_CALLID = -33; - private final SecureCall saslCall = new SecureCall(SASL_CALLID, null, this, null, 0); - - private boolean useWrap = false; - - public SecureConnection(SocketChannel channel, long lastContact) { - super(channel, lastContact); - this.header = new SecureConnectionHeader(); - this.channel = channel; - this.data = null; - this.dataLengthBuffer = ByteBuffer.allocate(4); - this.unwrappedData = null; - this.unwrappedDataLengthBuffer = ByteBuffer.allocate(4); - this.socket = channel.socket(); - this.addr = socket.getInetAddress(); - this.responseQueue = new LinkedList(); - } - - @Override - public String toString() { - return getHostAddress() + ":" + remotePort; - } - - public String getHostAddress() { - return hostAddress; - } - - public InetAddress getHostInetAddress() { - return addr; - } - - private User getAuthorizedUgi(String authorizedId) - throws IOException { - if (authMethod == AuthMethod.DIGEST) { - TokenIdentifier tokenId = HBaseSaslRpcServer.getIdentifier(authorizedId, - secretManager); - UserGroupInformation ugi = tokenId.getUser(); - if (ugi == null) { - throw new AccessControlException( - "Can't retrieve username from tokenIdentifier."); - } - ugi.addTokenIdentifier(tokenId); - return User.create(ugi); - } else { - return User.create(UserGroupInformation.createRemoteUser(authorizedId)); - } - } - - private void saslReadAndProcess(byte[] saslToken) throws IOException, - InterruptedException { - if (!saslContextEstablished) { - byte[] replyToken = null; - try { - if (saslServer == null) { - switch (authMethod) { - case DIGEST: - if (secretManager == null) { - throw new AccessControlException( - "Server is not configured to do DIGEST authentication."); - } - saslServer = Sasl.createSaslServer(AuthMethod.DIGEST - .getMechanismName(), null, HBaseSaslRpcServer.SASL_DEFAULT_REALM, - HBaseSaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler( - secretManager, this)); - break; - default: - UserGroupInformation current = UserGroupInformation - .getCurrentUser(); - String fullName = 
current.getUserName(); - if (LOG.isDebugEnabled()) - LOG.debug("Kerberos principal name is " + fullName); - final String names[] = HBaseSaslRpcServer.splitKerberosName(fullName); - if (names.length != 3) { - throw new AccessControlException( - "Kerberos principal name does NOT have the expected " - + "hostname part: " + fullName); - } - current.doAs(new PrivilegedExceptionAction() { - @Override - public Object run() throws SaslException { - saslServer = Sasl.createSaslServer(AuthMethod.KERBEROS - .getMechanismName(), names[0], names[1], - HBaseSaslRpcServer.SASL_PROPS, new SaslGssCallbackHandler()); - return null; - } - }); - } - if (saslServer == null) - throw new AccessControlException( - "Unable to find SASL server implementation for " - + authMethod.getMechanismName()); - if (LOG.isDebugEnabled()) - LOG.debug("Created SASL server with mechanism = " - + authMethod.getMechanismName()); - } - if (LOG.isDebugEnabled()) - LOG.debug("Have read input token of size " + saslToken.length - + " for processing by saslServer.evaluateResponse()"); - replyToken = saslServer.evaluateResponse(saslToken); - } catch (IOException e) { - IOException sendToClient = e; - Throwable cause = e; - while (cause != null) { - if (cause instanceof InvalidToken) { - sendToClient = (InvalidToken) cause; - break; - } - cause = cause.getCause(); - } - doSaslReply(SaslStatus.ERROR, null, sendToClient.getClass().getName(), - sendToClient.getLocalizedMessage()); - rpcMetrics.authenticationFailures.inc(); - String clientIP = this.toString(); - // attempting user could be null - AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser); - throw e; - } - if (replyToken != null) { - if (LOG.isDebugEnabled()) - LOG.debug("Will send token of size " + replyToken.length - + " from saslServer."); - doSaslReply(SaslStatus.SUCCESS, new BytesWritable(replyToken), null, - null); - } - if (saslServer.isComplete()) { - LOG.debug("SASL server context established. Negotiated QoP is " - + saslServer.getNegotiatedProperty(Sasl.QOP)); - String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP); - useWrap = qop != null && !"auth".equalsIgnoreCase(qop); - user = getAuthorizedUgi(saslServer.getAuthorizationID()); - LOG.debug("SASL server successfully authenticated client: " + user); - rpcMetrics.authenticationSuccesses.inc(); - AUDITLOG.trace(AUTH_SUCCESSFUL_FOR + user); - saslContextEstablished = true; - } - } else { - if (LOG.isDebugEnabled()) - LOG.debug("Have read input token of size " + saslToken.length - + " for processing by saslServer.unwrap()"); - - if (!useWrap) { - processOneRpc(saslToken); - } else { - byte[] plaintextData = saslServer.unwrap(saslToken, 0, - saslToken.length); - processUnwrappedData(plaintextData); - } - } - } - - private void doSaslReply(SaslStatus status, Writable rv, - String errorClass, String error) throws IOException { - saslCall.setResponse(rv, - status == SaslStatus.SUCCESS ? Status.SUCCESS : Status.ERROR, - errorClass, error); - saslCall.responder = responder; - saslCall.sendResponseIfReady(); - } - - private void disposeSasl() { - if (saslServer != null) { - try { - saslServer.dispose(); - } catch (SaslException ignored) { - } - } - } - - public int readAndProcess() throws IOException, InterruptedException { - while (true) { - /* Read at most one RPC. If the header is not read completely yet - * then iterate until we read first RPC or until there is no data left. 
- */ - int count = -1; - if (dataLengthBuffer.remaining() > 0) { - count = channelRead(channel, dataLengthBuffer); - if (count < 0 || dataLengthBuffer.remaining() > 0) - return count; - } - - if (!rpcHeaderRead) { - //Every connection is expected to send the header. - if (rpcHeaderBuffer == null) { - rpcHeaderBuffer = ByteBuffer.allocate(2); - } - count = channelRead(channel, rpcHeaderBuffer); - if (count < 0 || rpcHeaderBuffer.remaining() > 0) { - return count; - } - int version = rpcHeaderBuffer.get(0); - byte[] method = new byte[] {rpcHeaderBuffer.get(1)}; - authMethod = AuthMethod.read(new DataInputStream( - new ByteArrayInputStream(method))); - dataLengthBuffer.flip(); - if (!HEADER.equals(dataLengthBuffer) || version != CURRENT_VERSION) { - //Warning is ok since this is not supposed to happen. - if (INSECURE_VERSIONS.contains(version)) { - LOG.warn("An insecure client (version '" + version + "') is attempting to connect " + - " to this version '" + CURRENT_VERSION + "' secure server from " + - hostAddress + ":" + remotePort); - } else { - LOG.warn("Incorrect header or version mismatch from " + - hostAddress + ":" + remotePort + - " got version " + version + - " expected version " + CURRENT_VERSION); - } - - return -1; - } - dataLengthBuffer.clear(); - if (authMethod == null) { - throw new IOException("Unable to read authentication method"); - } - if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) { - AccessControlException ae = new AccessControlException( - "Authentication is required"); - SecureCall failedCall = new SecureCall(AUTHORIZATION_FAILED_CALLID, null, this, - null, 0); - failedCall.setResponse(null, Status.FATAL, ae.getClass().getName(), - ae.getMessage()); - responder.doRespond(failedCall); - throw ae; - } - if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) { - doSaslReply(SaslStatus.SUCCESS, new IntWritable( - HBaseSaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null); - authMethod = AuthMethod.SIMPLE; - // client has already sent the initial Sasl message and we - // should ignore it. Both client and server should fall back - // to simple auth from now on. - skipInitialSaslHandshake = true; - } - if (authMethod != AuthMethod.SIMPLE) { - useSasl = true; - } - - rpcHeaderBuffer = null; - rpcHeaderRead = true; - continue; - } - - if (data == null) { - dataLengthBuffer.flip(); - dataLength = dataLengthBuffer.getInt(); - - if (dataLength == HBaseClient.PING_CALL_ID) { - if(!useWrap) { //covers the !useSasl too - dataLengthBuffer.clear(); - return 0; //ping message - } - } - if (dataLength < 0) { - LOG.warn("Unexpected data length " + dataLength + "!! 
from " + - getHostAddress()); - } - data = ByteBuffer.allocate(dataLength); - incRpcCount(); // Increment the rpc count - } - - count = channelRead(channel, data); - - if (data.remaining() == 0) { - dataLengthBuffer.clear(); - data.flip(); - if (skipInitialSaslHandshake) { - data = null; - skipInitialSaslHandshake = false; - continue; - } - boolean isHeaderRead = headerRead; - if (useSasl) { - saslReadAndProcess(data.array()); - } else { - processOneRpc(data.array()); - } - data = null; - if (!isHeaderRead) { - continue; - } - } - return count; - } - } - - /// Reads the connection header following version - private void processHeader(byte[] buf) throws IOException { - DataInputStream in = - new DataInputStream(new ByteArrayInputStream(buf)); - header.readFields(in); - try { - String protocolClassName = header.getProtocol(); - if (protocolClassName != null) { - protocol = getProtocolClass(header.getProtocol(), conf); - } - } catch (ClassNotFoundException cnfe) { - throw new IOException("Unknown protocol: " + header.getProtocol()); - } - - User protocolUser = header.getUser(); - if (!useSasl) { - user = protocolUser; - if (user != null) { - user.getUGI().setAuthenticationMethod(AuthMethod.SIMPLE.authenticationMethod); - } - } else { - // user is authenticated - user.getUGI().setAuthenticationMethod(authMethod.authenticationMethod); - //Now we check if this is a proxy user case. If the protocol user is - //different from the 'user', it is a proxy user scenario. However, - //this is not allowed if user authenticated with DIGEST. - if ((protocolUser != null) - && (!protocolUser.getName().equals(user.getName()))) { - if (authMethod == AuthMethod.DIGEST) { - // Not allowed to doAs if token authentication is used - throw new AccessControlException("Authenticated user (" + user - + ") doesn't match what the client claims to be (" - + protocolUser + ")"); - } else { - // Effective user can be different from authenticated user - // for simple auth or kerberos auth - // The user is the real user. Now we create a proxy user - UserGroupInformation realUser = user.getUGI(); - user = User.create( - UserGroupInformation.createProxyUser(protocolUser.getName(), - realUser)); - // Now the user is a proxy user, set Authentication method Proxy. 
- user.getUGI().setAuthenticationMethod(AuthenticationMethod.PROXY); - } - } - } - } - - private void processUnwrappedData(byte[] inBuf) throws IOException, - InterruptedException { - ReadableByteChannel ch = Channels.newChannel(new ByteArrayInputStream( - inBuf)); - // Read all RPCs contained in the inBuf, even partial ones - while (true) { - int count = -1; - if (unwrappedDataLengthBuffer.remaining() > 0) { - count = channelRead(ch, unwrappedDataLengthBuffer); - if (count <= 0 || unwrappedDataLengthBuffer.remaining() > 0) - return; - } - - if (unwrappedData == null) { - unwrappedDataLengthBuffer.flip(); - int unwrappedDataLength = unwrappedDataLengthBuffer.getInt(); - - if (unwrappedDataLength == HBaseClient.PING_CALL_ID) { - if (LOG.isDebugEnabled()) - LOG.debug("Received ping message"); - unwrappedDataLengthBuffer.clear(); - continue; // ping message - } - unwrappedData = ByteBuffer.allocate(unwrappedDataLength); - } - - count = channelRead(ch, unwrappedData); - if (count <= 0 || unwrappedData.remaining() > 0) - return; - - if (unwrappedData.remaining() == 0) { - unwrappedDataLengthBuffer.clear(); - unwrappedData.flip(); - processOneRpc(unwrappedData.array()); - unwrappedData = null; - } - } - } - - private void processOneRpc(byte[] buf) throws IOException, - InterruptedException { - if (headerRead) { - processData(buf); - } else { - processHeader(buf); - headerRead = true; - if (!authorizeConnection()) { - throw new AccessControlException("Connection from " + this - + " for protocol " + header.getProtocol() - + " is unauthorized for user " + user); - } - } - } - - protected void processData(byte[] buf) throws IOException, InterruptedException { - DataInputStream dis = - new DataInputStream(new ByteArrayInputStream(buf)); - int id = dis.readInt(); // try to read an id - - if (LOG.isDebugEnabled()) { - LOG.debug(" got #" + id); - } - - Writable param = ReflectionUtils.newInstance(paramClass, conf); // read param - param.readFields(dis); - - SecureCall call = new SecureCall(id, param, this, responder, buf.length); - - if (priorityCallQueue != null && getQosLevel(param) > highPriorityLevel) { - priorityCallQueue.put(call); - } else { - callQueue.put(call); // queue the call; maybe blocked here - } - } - - private boolean authorizeConnection() throws IOException { - try { - // If auth method is DIGEST, the token was obtained by the - // real user for the effective user, therefore not required to - // authorize real user. 
doAs is allowed only for simple or kerberos - // authentication - if (user != null && user.getUGI().getRealUser() != null - && (authMethod != AuthMethod.DIGEST)) { - ProxyUsers.authorize(user.getUGI(), this.getHostAddress(), conf); - } - authorize(user, header, getHostInetAddress()); - if (LOG.isDebugEnabled()) { - LOG.debug("Successfully authorized " + header); - } - rpcMetrics.authorizationSuccesses.inc(); - } catch (AuthorizationException ae) { - LOG.debug("Connection authorization failed: "+ae.getMessage(), ae); - rpcMetrics.authorizationFailures.inc(); - SecureCall failedCall = new SecureCall(AUTHORIZATION_FAILED_CALLID, null, this, - null, 0); - failedCall.setResponse(null, Status.FATAL, ae.getClass().getName(), - ae.getMessage()); - responder.doRespond(failedCall); - return false; - } - return true; - } - - protected synchronized void close() { - disposeSasl(); - data = null; - dataLengthBuffer = null; - if (!channel.isOpen()) - return; - try {socket.shutdownOutput();} catch(Exception ignored) {} // FindBugs DE_MIGHT_IGNORE - if (channel.isOpen()) { - try {channel.close();} catch(Exception ignored) {} - } - try {socket.close();} catch(Exception ignored) {} - } - } - - /** Constructs a server listening on the named port and address. Parameters passed must - * be of the named class. The handlerCount determines - * the number of handler threads that will be used to process calls. - * - */ - @SuppressWarnings("unchecked") - protected SecureServer(String bindAddress, int port, - Class paramClass, int handlerCount, - int priorityHandlerCount, Configuration conf, String serverName, - int highPriorityLevel) - throws IOException { - super(bindAddress, port, paramClass, handlerCount, priorityHandlerCount, - conf, serverName, highPriorityLevel); - this.authorize = - conf.getBoolean(HADOOP_SECURITY_AUTHORIZATION, false); - this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled(); - LOG.debug("security enabled="+isSecurityEnabled); - - if (isSecurityEnabled) { - HBaseSaslRpcServer.init(conf); - } - } - - @Override - protected Connection getConnection(SocketChannel channel, long time) { - return new SecureConnection(channel, time); - } - - Configuration getConf() { - return conf; - } - - /** for unit testing only, should be called before server is started */ - void disableSecurity() { - this.isSecurityEnabled = false; - } - - /** for unit testing only, should be called before server is started */ - void enableSecurity() { - this.isSecurityEnabled = true; - } - - /** Stops the service. No new calls will be handled after this is called. */ - public synchronized void stop() { - super.stop(); - } - - public SecretManager getSecretManager() { - return this.secretManager; - } - - public void setSecretManager(SecretManager secretManager) { - this.secretManager = (SecretManager) secretManager; - } - - /** - * Authorize the incoming client connection. 
- * - * @param user client user - * @param connection incoming connection - * @param addr InetAddress of incoming connection - * @throws org.apache.hadoop.security.authorize.AuthorizationException when the client isn't authorized to talk the protocol - */ - public void authorize(User user, - ConnectionHeader connection, - InetAddress addr - ) throws AuthorizationException { - if (authorize) { - Class protocol = null; - try { - protocol = getProtocolClass(connection.getProtocol(), getConf()); - } catch (ClassNotFoundException cfne) { - throw new AuthorizationException("Unknown protocol: " + - connection.getProtocol()); - } - authManager.authorize(user != null ? user.getUGI() : null, - protocol, getConf(), addr); - } - } -} \ No newline at end of file Index: src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java =================================================================== --- src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (revision 1326827) +++ src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java (working copy) @@ -73,7 +73,6 @@ import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.JVMClusterUtil; @@ -88,6 +87,7 @@ import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.mapred.MiniMRCluster; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.mapred.TaskLog; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NodeExistsException; @@ -1676,18 +1676,19 @@ * @return A new configuration instance with a different user set into it. * @throws IOException */ - public static User getDifferentUser(final Configuration c, + public static UserGroupInformation getDifferentUser(final Configuration c, final String differentiatingSuffix) throws IOException { FileSystem currentfs = FileSystem.get(c); if (!(currentfs instanceof DistributedFileSystem)) { - return User.getCurrent(); + return UserGroupInformation.getCurrentUser(); } // Else distributed filesystem. Make a new instance per daemon. Below // code is taken from the AppendTestUtil over in hdfs. 
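getDifferentUser() now builds its throwaway identity directly with UserGroupInformation, the pattern this patch applies throughout the tests. As a standalone sketch (the user name and group are illustrative):

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class TestUserSketch {
  public static void main(String[] args) throws Exception {
    // create a fake logged-in user with the given name and group membership
    UserGroupInformation user = UserGroupInformation.createUserForTesting(
        "someuser.hrs.0", new String[]{"supergroup"});
    // run work under that identity
    user.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        System.out.println("running as "
            + UserGroupInformation.getCurrentUser().getUserName());
        return null;
      }
    });
  }
}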
- String username = User.getCurrent().getName() + + String username = UserGroupInformation.getCurrentUser().getUserName() + differentiatingSuffix; - User user = User.createUserForTesting(c, username, + UserGroupInformation user = UserGroupInformation.createUserForTesting( + username, new String[]{"supergroup"}); return user; } Index: src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java =================================================================== --- src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (revision 1326827) +++ src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (working copy) @@ -42,11 +42,11 @@ import org.apache.hadoop.hbase.regionserver.FlushRequester; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.Store; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdge; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.security.UserGroupInformation; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -211,9 +211,9 @@ // Now 'crash' the region by stealing its wal final Configuration newConf = HBaseConfiguration.create(this.conf); - User user = HBaseTestingUtility.getDifferentUser(newConf, + UserGroupInformation user = HBaseTestingUtility.getDifferentUser(newConf, tableNameStr); - user.runAs(new PrivilegedExceptionAction() { + user.doAs(new PrivilegedExceptionAction() { public Object run() throws Exception { runWALSplit(newConf); HLog wal2 = createWAL(newConf); @@ -304,9 +304,9 @@ // long gone. HBaseTestingUtility.setMaxRecoveryErrorCount(wal2.getOutputStream(), 1); final Configuration newConf = HBaseConfiguration.create(this.conf); - User user = HBaseTestingUtility.getDifferentUser(newConf, + UserGroupInformation user = HBaseTestingUtility.getDifferentUser(newConf, tableNameStr); - user.runAs(new PrivilegedExceptionAction() { + user.doAs(new PrivilegedExceptionAction() { public Object run() throws Exception { runWALSplit(newConf); FileSystem newFS = FileSystem.get(newConf); @@ -474,9 +474,9 @@ // Make a new conf and a new fs for the splitter to run on so we can take // over old wal. 
final Configuration newConf = HBaseConfiguration.create(this.conf); - User user = HBaseTestingUtility.getDifferentUser(newConf, + UserGroupInformation user = HBaseTestingUtility.getDifferentUser(newConf, ".replay.wal.secondtime"); - user.runAs(new PrivilegedExceptionAction() { + user.doAs(new PrivilegedExceptionAction() { public Object run() throws Exception { runWALSplit(newConf); FileSystem newFS = FileSystem.get(newConf); Index: src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java =================================================================== --- src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (revision 1326827) +++ src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (working copy) @@ -56,11 +56,11 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; import org.apache.hadoop.hbase.regionserver.wal.HLog; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper; import org.apache.hadoop.hbase.util.ManualEnvironmentEdge; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Progressable; import org.junit.experimental.categories.Category; import org.mockito.Mockito; @@ -581,12 +581,12 @@ LOG.info("Setting up a faulty file system that cannot write"); final Configuration conf = HBaseConfiguration.create(); - User user = User.createUserForTesting(conf, + UserGroupInformation user = UserGroupInformation.createUserForTesting( "testhandleerrorsinflush", new String[]{"foo"}); // Inject our faulty LocalFileSystem conf.setClass("fs.file.impl", FaultyFileSystem.class, FileSystem.class); - user.runAs(new PrivilegedExceptionAction() { + user.doAs(new PrivilegedExceptionAction() { public Object run() throws Exception { // Make sure it worked (above is sensitive to caching details in hadoop core) FileSystem fs = FileSystem.get(conf); Index: src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java =================================================================== --- src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java (revision 1326827) +++ src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java (working copy) @@ -34,11 +34,11 @@ import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.io.MapWritable; +import org.apache.hadoop.security.UserGroupInformation; /** * This class creates a single process HBase cluster. 
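
Throughout these test changes, user.runAs(action) becomes user.doAs(action) with the action bodies unchanged. A minimal sketch of the new pattern; the user name and action body are illustrative only:

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.security.UserGroupInformation;

    static void runAsTestUser() throws Exception {
      UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
          "testuser", new String[]{ "supergroup" });
      ugi.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
          // executes with "testuser" as the current user; e.g. a
          // FileSystem.get(conf) call here binds to that user's credentials
          return null;
        }
      });
    }
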
@@ -92,13 +92,13 @@ */ public static class MiniHBaseClusterRegionServer extends HRegionServer { private Thread shutdownThread = null; - private User user = null; + private UserGroupInformation user = null; public static boolean TEST_SKIP_CLOSE = false; public MiniHBaseClusterRegionServer(Configuration conf) throws IOException, InterruptedException { super(conf); - this.user = User.getCurrent(); + this.user = UserGroupInformation.getCurrentUser(); } /* @@ -119,7 +119,7 @@ @Override public void run() { try { - this.user.runAs(new PrivilegedAction(){ + this.user.doAs(new PrivilegedAction(){ public Object run() { runRegionServer(); return null; @@ -146,7 +146,7 @@ } public void abort(final String reason, final Throwable cause) { - this.user.runAs(new PrivilegedAction() { + this.user.doAs(new PrivilegedAction() { public Object run() { abortRegionServer(reason, cause); return null; @@ -192,7 +192,7 @@ // manually add the regionservers as other users for (int i=0; i() { public Object run() throws Exception { Path p = runWALSplit(newConf); LOG.info("WALSplit path == " + p); Index: src/test/java/org/apache/hadoop/hbase/security/TestUser.java =================================================================== --- src/test/java/org/apache/hadoop/hbase/security/TestUser.java (revision 1326827) +++ src/test/java/org/apache/hadoop/hbase/security/TestUser.java (working copy) @@ -1,112 +0,0 @@ -/* - * Copyright 2010 The Apache Software Foundation - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.hadoop.hbase.security; - -import static org.junit.Assert.*; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.SmallTests; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -import java.io.IOException; -import java.security.PrivilegedAction; -import java.security.PrivilegedExceptionAction; - -@Category(SmallTests.class) -public class TestUser { - private static Log LOG = LogFactory.getLog(TestUser.class); - - @Test - public void testBasicAttributes() throws Exception { - Configuration conf = HBaseConfiguration.create(); - User user = User.createUserForTesting(conf, "simple", new String[]{"foo"}); - assertEquals("Username should match", "simple", user.getName()); - assertEquals("Short username should match", "simple", user.getShortName()); - // don't test shortening of kerberos names because regular Hadoop doesn't support them - } - - @Test - public void testRunAs() throws Exception { - Configuration conf = HBaseConfiguration.create(); - final User user = User.createUserForTesting(conf, "testuser", new String[]{"foo"}); - final PrivilegedExceptionAction action = new PrivilegedExceptionAction(){ - public String run() throws IOException { - User u = User.getCurrent(); - return u.getName(); - } - }; - - String username = user.runAs(action); - assertEquals("Current user within runAs() should match", - "testuser", username); - - // ensure the next run is correctly set - User user2 = User.createUserForTesting(conf, "testuser2", new String[]{"foo"}); - String username2 = user2.runAs(action); - assertEquals("Second username should match second user", - "testuser2", username2); - - // check the exception version - username = user.runAs(new PrivilegedExceptionAction(){ - public String run() throws Exception { - return User.getCurrent().getName(); - } - }); - assertEquals("User name in runAs() should match", "testuser", username); - - // verify that nested contexts work - user2.runAs(new PrivilegedExceptionAction(){ - public Object run() throws IOException, InterruptedException{ - String nestedName = user.runAs(action); - assertEquals("Nest name should match nested user", "testuser", nestedName); - assertEquals("Current name should match current user", - "testuser2", User.getCurrent().getName()); - return null; - } - }); - } - - /** - * Make sure that we're returning a result for the current user. - * Previously getCurrent() was returning null if not initialized on - * non-secure Hadoop variants. 
- */ - @Test - public void testGetCurrent() throws Exception { - User user1 = User.getCurrent(); - assertNotNull(user1.ugi); - LOG.debug("User1 is "+user1.getName()); - - for (int i =0 ; i< 100; i++) { - User u = User.getCurrent(); - assertNotNull(u); - assertEquals(user1.getName(), u.getName()); - } - } - - @org.junit.Rule - public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = - new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); -} - Index: src/main/protobuf/RPC.proto =================================================================== --- src/main/protobuf/RPC.proto (revision 1326827) +++ src/main/protobuf/RPC.proto (working copy) @@ -94,9 +94,14 @@ /** Echo back the callId the client sent */ required int32 callId = 1; /** Did the RPC execution encounter an error at the server */ - required bool error = 2; + enum Status { + SUCCESS = 0; + ERROR = 1; + FATAL = 2; + } + required Status status = 2; /** Optional response bytes */ optional bytes response = 3; /** Optional exception when error is true*/ optional RpcException exception = 4; -} +} \ No newline at end of file Index: src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/protobuf/generated/RPCProtos.java (working copy) @@ -2081,9 +2081,9 @@ boolean hasCallId(); int getCallId(); - // required bool error = 2; - boolean hasError(); - boolean getError(); + // required .RpcResponse.Status status = 2; + boolean hasStatus(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status getStatus(); // optional bytes response = 3; boolean hasResponse(); @@ -2122,6 +2122,78 @@ return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.internal_static_RpcResponse_fieldAccessorTable; } + public enum Status + implements com.google.protobuf.ProtocolMessageEnum { + SUCCESS(0, 0), + ERROR(1, 1), + FATAL(2, 2), + ; + + public static final int SUCCESS_VALUE = 0; + public static final int ERROR_VALUE = 1; + public static final int FATAL_VALUE = 2; + + + public final int getNumber() { return value; } + + public static Status valueOf(int value) { + switch (value) { + case 0: return SUCCESS; + case 1: return ERROR; + case 2: return FATAL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Status findValueByNumber(int number) { + return Status.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.getDescriptor().getEnumTypes().get(0); + } + + private static final Status[] VALUES = { + SUCCESS, ERROR, FATAL, + }; + + public static Status valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + 
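
The Status enum above replaces the old required `bool error` field, so a caller can tell a per-call failure from a connection-fatal one. An illustrative sketch (not generated code), assuming `responseBytes` holds a serialized RpcResponse:

    RpcResponse resp = RpcResponse.parseFrom(responseBytes); // throws InvalidProtocolBufferException
    if (resp.getStatus() == RpcResponse.Status.FATAL) {
      // connection-level failure: the client should tear down the connection
    } else if (resp.getStatus() == RpcResponse.Status.ERROR) {
      // per-call failure: surface resp.getException() to this caller only
    }
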
+ private final int index; + private final int value; + + private Status(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:RpcResponse.Status) + } + private int bitField0_; // required int32 callId = 1; public static final int CALLID_FIELD_NUMBER = 1; @@ -2133,14 +2205,14 @@ return callId_; } - // required bool error = 2; - public static final int ERROR_FIELD_NUMBER = 2; - private boolean error_; - public boolean hasError() { + // required .RpcResponse.Status status = 2; + public static final int STATUS_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status status_; + public boolean hasStatus() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public boolean getError() { - return error_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status getStatus() { + return status_; } // optional bytes response = 3; @@ -2168,7 +2240,7 @@ private void initFields() { callId_ = 0; - error_ = false; + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status.SUCCESS; response_ = com.google.protobuf.ByteString.EMPTY; exception_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException.getDefaultInstance(); } @@ -2181,7 +2253,7 @@ memoizedIsInitialized = 0; return false; } - if (!hasError()) { + if (!hasStatus()) { memoizedIsInitialized = 0; return false; } @@ -2202,7 +2274,7 @@ output.writeInt32(1, callId_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBool(2, error_); + output.writeEnum(2, status_.getNumber()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, response_); @@ -2225,7 +2297,7 @@ } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, error_); + .computeEnumSize(2, status_.getNumber()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream @@ -2263,10 +2335,10 @@ result = result && (getCallId() == other.getCallId()); } - result = result && (hasError() == other.hasError()); - if (hasError()) { - result = result && (getError() - == other.getError()); + result = result && (hasStatus() == other.hasStatus()); + if (hasStatus()) { + result = result && + (getStatus() == other.getStatus()); } result = result && (hasResponse() == other.hasResponse()); if (hasResponse()) { @@ -2291,9 +2363,9 @@ hash = (37 * hash) + CALLID_FIELD_NUMBER; hash = (53 * hash) + getCallId(); } - if (hasError()) { - hash = (37 * hash) + ERROR_FIELD_NUMBER; - hash = (53 * hash) + hashBoolean(getError()); + if (hasStatus()) { + hash = (37 * hash) + STATUS_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getStatus()); } if (hasResponse()) { hash = (37 * hash) + RESPONSE_FIELD_NUMBER; @@ -2422,7 +2494,7 @@ super.clear(); callId_ = 0; bitField0_ = (bitField0_ & ~0x00000001); - error_ = false; + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status.SUCCESS; bitField0_ = (bitField0_ & ~0x00000002); response_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); @@ -2477,7 +2549,7 @@ if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } - result.error_ = error_; + result.status_ = status_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } @@ -2509,8 +2581,8 @@ if (other.hasCallId()) { setCallId(other.getCallId()); } - if (other.hasError()) { - setError(other.getError()); + if (other.hasStatus()) { 
+ setStatus(other.getStatus()); } if (other.hasResponse()) { setResponse(other.getResponse()); @@ -2527,7 +2599,7 @@ return false; } - if (!hasError()) { + if (!hasStatus()) { return false; } @@ -2569,8 +2641,14 @@ break; } case 16: { - bitField0_ |= 0x00000002; - error_ = input.readBool(); + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status value = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + status_ = value; + } break; } case 26: { @@ -2614,23 +2692,26 @@ return this; } - // required bool error = 2; - private boolean error_ ; - public boolean hasError() { + // required .RpcResponse.Status status = 2; + private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status.SUCCESS; + public boolean hasStatus() { return ((bitField0_ & 0x00000002) == 0x00000002); } - public boolean getError() { - return error_; + public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status getStatus() { + return status_; } - public Builder setError(boolean value) { + public Builder setStatus(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status value) { + if (value == null) { + throw new NullPointerException(); + } bitField0_ |= 0x00000002; - error_ = value; + status_ = value; onChanged(); return this; } - public Builder clearError() { + public Builder clearStatus() { bitField0_ = (bitField0_ & ~0x00000002); - error_ = false; + status_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status.SUCCESS; onChanged(); return this; } @@ -2801,11 +2882,13 @@ "doop.hbase.ipc.HRegionInterface\"-\n\nRpcRe" + "quest\022\016\n\006callId\030\001 \002(\005\022\017\n\007request\030\002 \001(\014\"9" + "\n\014RpcException\022\025\n\rexceptionName\030\001 \002(\t\022\022\n" + - "\nstackTrace\030\002 \001(\t\"`\n\013RpcResponse\022\016\n\006call" + - "Id\030\001 \002(\005\022\r\n\005error\030\002 \002(\010\022\020\n\010response\030\003 \001(" + - "\014\022 \n\texception\030\004 \001(\0132\r.RpcExceptionB<\n*o", - "rg.apache.hadoop.hbase.protobuf.generate" + - "dB\tRPCProtosH\001\240\001\001" + "\nstackTrace\030\002 \001(\t\"\243\001\n\013RpcResponse\022\016\n\006cal" + + "lId\030\001 \002(\005\022#\n\006status\030\002 \002(\0162\023.RpcResponse." 
+ + "Status\022\020\n\010response\030\003 \001(\014\022 \n\texception\030\004 ", + "\001(\0132\r.RpcException\"+\n\006Status\022\013\n\007SUCCESS\020" + + "\000\022\t\n\005ERROR\020\001\022\t\n\005FATAL\020\002B<\n*org.apache.ha" + + "doop.hbase.protobuf.generatedB\tRPCProtos" + + "H\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -2849,7 +2932,7 @@ internal_static_RpcResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RpcResponse_descriptor, - new java.lang.String[] { "CallId", "Error", "Response", "Exception", }, + new java.lang.String[] { "CallId", "Status", "Response", "Exception", }, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.class, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Builder.class); return null; Index: src/main/java/org/apache/hadoop/hbase/rest/Main.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/rest/Main.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/rest/Main.java (working copy) @@ -32,10 +32,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.rest.filter.GzipFilter; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.net.DNS; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; import java.util.List; import java.util.ArrayList; @@ -150,13 +151,12 @@ context.addFilter(GzipFilter.class, "/*", 0); // login the server principal (if using secure Hadoop) - if (User.isSecurityEnabled() && User.isHBaseSecurityEnabled(conf)) { - String machineName = Strings.domainNamePointerToHostName( - DNS.getDefaultHost(conf.get("hbase.rest.dns.interface", "default"), - conf.get("hbase.rest.dns.nameserver", "default"))); - User.login(conf, "hbase.rest.keytab.file", "hbase.rest.kerberos.principal", - machineName); - } + String machineName = Strings.domainNamePointerToHostName( + DNS.getDefaultHost(conf.get("hbase.rest.dns.interface", "default"), + conf.get("hbase.rest.dns.nameserver", "default"))); + SecurityUtil.login(conf, "hbase.rest.keytab.file", + "hbase.rest.kerberos.principal", machineName); + // start server server.start(); Index: src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java (working copy) @@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.regionserver.StoreFile; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableComparable; +import org.apache.hadoop.security.UserGroupInformation; /** * HTableDescriptor contains the details about an HBase table such as the descriptors of @@ -1177,8 +1177,8 @@ .setScope(HConstants.REPLICATION_SCOPE_LOCAL) }); - public void setOwner(User owner) { - setOwnerString(owner != null ? 
owner.getShortName() : null); + public void setOwner(UserGroupInformation owner) { + setOwnerString(owner != null ? owner.getShortUserName() : null); } // used by admin.rb:alter(table_name,*args) to update owner. Index: src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java (working copy) @@ -32,7 +32,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.regionserver.HRegionServer; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import org.apache.hadoop.hbase.util.Threads; @@ -40,6 +39,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.util.JVMClusterUtil; +import org.apache.hadoop.security.UserGroupInformation; /** * This class creates a single process HBase cluster. One thread is created for @@ -180,9 +180,9 @@ } public JVMClusterUtil.RegionServerThread addRegionServer( - final Configuration config, final int index, User user) + final Configuration config, final int index, UserGroupInformation user) throws IOException, InterruptedException { - return user.runAs( + return user.doAs( new PrivilegedExceptionAction() { public JVMClusterUtil.RegionServerThread run() throws Exception { return addRegionServer(config, index); @@ -207,9 +207,9 @@ } public JVMClusterUtil.MasterThread addMaster( - final Configuration c, final int index, User user) + final Configuration c, final int index, UserGroupInformation user) throws IOException, InterruptedException { - return user.runAs( + return user.doAs( new PrivilegedExceptionAction() { public JVMClusterUtil.MasterThread run() throws Exception { return addMaster(c, index); Index: src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java (revision 0) +++ src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java (revision 0) @@ -0,0 +1,278 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hadoop.hbase.security;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.callback.NameCallback;
+import javax.security.auth.callback.PasswordCallback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.sasl.AuthorizeCallback;
+import javax.security.sasl.RealmCallback;
+import javax.security.sasl.Sasl;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.ipc.HBaseServer;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.SecretManager;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+
+/**
+ * A utility class for dealing with SASL on RPC server
+ */
+public class HBaseSaslRpcServer {
+  public static final Log LOG = LogFactory.getLog(HBaseSaslRpcServer.class);
+  public static final String SASL_DEFAULT_REALM = "default";
+  public static final Map<String, String> SASL_PROPS =
+      new TreeMap<String, String>();
+
+  public static final int SWITCH_TO_SIMPLE_AUTH = -88;
+
+  public static enum QualityOfProtection {
+    AUTHENTICATION("auth"),
+    INTEGRITY("auth-int"),
+    PRIVACY("auth-conf");
+
+    public final String saslQop;
+
+    private QualityOfProtection(String saslQop) {
+      this.saslQop = saslQop;
+    }
+
+    public String getSaslQop() {
+      return saslQop;
+    }
+  }
+
+  public static void init(Configuration conf) {
+    QualityOfProtection saslQOP = QualityOfProtection.AUTHENTICATION;
+    String rpcProtection = conf.get("hbase.rpc.protection",
+        QualityOfProtection.AUTHENTICATION.name().toLowerCase());
+    if (QualityOfProtection.INTEGRITY.name().toLowerCase()
+        .equals(rpcProtection)) {
+      saslQOP = QualityOfProtection.INTEGRITY;
+    } else if (QualityOfProtection.PRIVACY.name().toLowerCase().equals(
+        rpcProtection)) {
+      saslQOP = QualityOfProtection.PRIVACY;
+    }
+
+    SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
+    SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
+  }
+
+  static String encodeIdentifier(byte[] identifier) {
+    return new String(Base64.encodeBase64(identifier));
+  }
+
+  static byte[] decodeIdentifier(String identifier) {
+    return Base64.decodeBase64(identifier.getBytes());
+  }
+
+  public static <T extends TokenIdentifier> T getIdentifier(String id,
+      SecretManager<T> secretManager) throws InvalidToken {
+    byte[] tokenId = decodeIdentifier(id);
+    T tokenIdentifier = secretManager.createIdentifier();
+    try {
+      tokenIdentifier.readFields(new DataInputStream(new ByteArrayInputStream(
+          tokenId)));
+    } catch (IOException e) {
+      throw (InvalidToken) new InvalidToken(
+          "Can't de-serialize tokenIdentifier").initCause(e);
+    }
+    return tokenIdentifier;
+  }
+
+  static char[] encodePassword(byte[] password) {
+    return new String(Base64.encodeBase64(password)).toCharArray();
+  }
+
+  /** Splitting fully qualified Kerberos name into parts */
+  public static String[] splitKerberosName(String fullName) {
+    return fullName.split("[/@]");
+  }
+
+  public enum SaslStatus {
+    SUCCESS (0),
+    ERROR (1);
+
+    public final int state;
+    private SaslStatus(int state) {
+      this.state = state;
+    }
+  }
+
+  /** Authentication method */
+  public static enum AuthMethod {
+    SIMPLE((byte) 80, "", AuthenticationMethod.SIMPLE),
+    KERBEROS((byte) 81, "GSSAPI", AuthenticationMethod.KERBEROS),
+    DIGEST((byte) 82, "DIGEST-MD5", AuthenticationMethod.TOKEN);
+
+    /** The code for this method. */
+    public final byte code;
+    public final String mechanismName;
+    public final AuthenticationMethod authenticationMethod;
+
+    private AuthMethod(byte code, String mechanismName,
+        AuthenticationMethod authMethod) {
+      this.code = code;
+      this.mechanismName = mechanismName;
+      this.authenticationMethod = authMethod;
+    }
+
+    private static final int FIRST_CODE = values()[0].code;
+
+    /** Return the object represented by the code. */
+    private static AuthMethod valueOf(byte code) {
+      final int i = (code & 0xff) - FIRST_CODE;
+      return i < 0 || i >= values().length ? null : values()[i];
+    }
+
+    /** Return the SASL mechanism name */
+    public String getMechanismName() {
+      return mechanismName;
+    }
+
+    /** Read from in */
+    public static AuthMethod read(DataInput in) throws IOException {
+      return valueOf(in.readByte());
+    }
+
+    /** Write to out */
+    public void write(DataOutput out) throws IOException {
+      out.write(code);
+    }
+  };
+
+  /** CallbackHandler for SASL DIGEST-MD5 mechanism */
+  public static class SaslDigestCallbackHandler implements CallbackHandler {
+    private SecretManager<TokenIdentifier> secretManager;
+    private HBaseServer.Connection connection;
+
+    public SaslDigestCallbackHandler(
+        SecretManager<TokenIdentifier> secretManager,
+        HBaseServer.Connection connection) {
+      this.secretManager = secretManager;
+      this.connection = connection;
+    }
+
+    private char[] getPassword(TokenIdentifier tokenid) throws InvalidToken {
+      return encodePassword(secretManager.retrievePassword(tokenid));
+    }
+
+    /** {@inheritDoc} */
+    @Override
+    public void handle(Callback[] callbacks) throws InvalidToken,
+        UnsupportedCallbackException {
+      NameCallback nc = null;
+      PasswordCallback pc = null;
+      AuthorizeCallback ac = null;
+      for (Callback callback : callbacks) {
+        if (callback instanceof AuthorizeCallback) {
+          ac = (AuthorizeCallback) callback;
+        } else if (callback instanceof NameCallback) {
+          nc = (NameCallback) callback;
+        } else if (callback instanceof PasswordCallback) {
+          pc = (PasswordCallback) callback;
+        } else if (callback instanceof RealmCallback) {
+          continue; // realm is ignored
+        } else {
+          throw new UnsupportedCallbackException(callback,
+              "Unrecognized SASL DIGEST-MD5 Callback");
+        }
+      }
+      if (pc != null) {
+        TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
+        char[] password = getPassword(tokenIdentifier);
+        UserGroupInformation user = null;
+        user = tokenIdentifier.getUser(); // may throw exception
+        connection.attemptingUser = user;
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("SASL server DIGEST-MD5 callback: setting password " +
+              "for client: " + tokenIdentifier.getUser());
+        }
+        pc.setPassword(password);
+      }
+      if (ac != null) {
+        String authid = ac.getAuthenticationID();
+        String authzid = ac.getAuthorizationID();
+        if (authid.equals(authzid)) {
+          ac.setAuthorized(true);
+        } else {
+          ac.setAuthorized(false);
+        }
+        if (ac.isAuthorized()) {
+          if (LOG.isDebugEnabled()) {
+            String username =
+                getIdentifier(authzid, secretManager).getUser().getUserName();
+            LOG.debug("SASL server DIGEST-MD5 callback: setting " +
+                "canonicalized client ID: " + username);
+          }
+          ac.setAuthorizedID(authzid);
+        }
+      }
+    }
+  }
+
+  /** CallbackHandler for SASL GSSAPI Kerberos mechanism */
+  public static class
SaslGssCallbackHandler implements CallbackHandler { + + /** {@inheritDoc} */ + @Override + public void handle(Callback[] callbacks) throws + UnsupportedCallbackException { + AuthorizeCallback ac = null; + for (Callback callback : callbacks) { + if (callback instanceof AuthorizeCallback) { + ac = (AuthorizeCallback) callback; + } else { + throw new UnsupportedCallbackException(callback, + "Unrecognized SASL GSSAPI Callback"); + } + } + if (ac != null) { + String authid = ac.getAuthenticationID(); + String authzid = ac.getAuthorizationID(); + if (authid.equals(authzid)) { + ac.setAuthorized(true); + } else { + ac.setAuthorized(false); + } + if (ac.isAuthorized()) { + if (LOG.isDebugEnabled()) + LOG.debug("SASL server GSSAPI callback: setting " + + "canonicalized client ID: " + authzid); + ac.setAuthorizedID(authzid); + } + } + } + } +} Index: src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java (revision 0) +++ src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java (revision 0) @@ -0,0 +1,280 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.security; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import javax.security.auth.callback.Callback; +import javax.security.auth.callback.CallbackHandler; +import javax.security.auth.callback.NameCallback; +import javax.security.auth.callback.PasswordCallback; +import javax.security.auth.callback.UnsupportedCallbackException; +import javax.security.sasl.RealmCallback; +import javax.security.sasl.RealmChoiceCallback; +import javax.security.sasl.Sasl; +import javax.security.sasl.SaslException; +import javax.security.sasl.SaslClient; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.io.WritableUtils; +import org.apache.hadoop.ipc.RemoteException; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus; +import org.apache.hadoop.security.SaslInputStream; +import org.apache.hadoop.security.SaslOutputStream; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; + +/** + * A utility class that encapsulates SASL logic for RPC client. 
+ * Copied from org.apache.hadoop.security
+ */
+public class HBaseSaslRpcClient {
+  public static final Log LOG = LogFactory.getLog(HBaseSaslRpcClient.class);
+
+  private final SaslClient saslClient;
+
+  /**
+   * Create a HBaseSaslRpcClient for an authentication method
+   *
+   * @param method
+   *          the requested authentication method
+   * @param token
+   *          token to use if needed by the authentication method
+   */
+  public HBaseSaslRpcClient(AuthMethod method,
+      Token<? extends TokenIdentifier> token, String serverPrincipal)
+      throws IOException {
+    switch (method) {
+    case DIGEST:
+      if (LOG.isDebugEnabled())
+        LOG.debug("Creating SASL " + AuthMethod.DIGEST.getMechanismName()
+            + " client to authenticate to service at " + token.getService());
+      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.DIGEST
+          .getMechanismName() }, null, null, HBaseSaslRpcServer.SASL_DEFAULT_REALM,
+          HBaseSaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(token));
+      break;
+    case KERBEROS:
+      if (LOG.isDebugEnabled()) {
+        LOG
+            .debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
+                + " client. Server's Kerberos principal name is "
+                + serverPrincipal);
+      }
+      if (serverPrincipal == null || serverPrincipal.length() == 0) {
+        throw new IOException(
+            "Failed to specify server's Kerberos principal name");
+      }
+      String names[] = HBaseSaslRpcServer.splitKerberosName(serverPrincipal);
+      if (names.length != 3) {
+        throw new IOException(
+            "Kerberos principal name does NOT have the expected hostname part: "
+                + serverPrincipal);
+      }
+      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.KERBEROS
+          .getMechanismName() }, null, names[0], names[1],
+          HBaseSaslRpcServer.SASL_PROPS, null);
+      break;
+    default:
+      throw new IOException("Unknown authentication method " + method);
+    }
+    if (saslClient == null)
+      throw new IOException("Unable to find SASL client implementation");
+  }
+
+  private static void readStatus(DataInputStream inStream) throws IOException {
+    int id = inStream.readInt(); // read and discard dummy id
+    int status = inStream.readInt(); // read status
+    if (status != SaslStatus.SUCCESS.state) {
+      throw new RemoteException(WritableUtils.readString(inStream),
+          WritableUtils.readString(inStream));
+    }
+  }
+
+  /**
+   * Do client side SASL authentication with server via the given InputStream
+   * and OutputStream
+   *
+   * @param inS
+   *          InputStream to use
+   * @param outS
+   *          OutputStream to use
+   * @return true if connection is set up, or false if needs to switch
+   *         to simple Auth.
+ * @throws IOException + */ + public boolean saslConnect(InputStream inS, OutputStream outS) + throws IOException { + DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS)); + DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream( + outS)); + + try { + byte[] saslToken = new byte[0]; + if (saslClient.hasInitialResponse()) + saslToken = saslClient.evaluateChallenge(saslToken); + if (saslToken != null) { + outStream.writeInt(saslToken.length); + outStream.write(saslToken, 0, saslToken.length); + outStream.flush(); + if (LOG.isDebugEnabled()) + LOG.debug("Have sent token of size " + saslToken.length + + " from initSASLContext."); + } + if (!saslClient.isComplete()) { + readStatus(inStream); + int len = inStream.readInt(); + if (len == HBaseSaslRpcServer.SWITCH_TO_SIMPLE_AUTH) { + if (LOG.isDebugEnabled()) + LOG.debug("Server asks us to fall back to simple auth."); + saslClient.dispose(); + return false; + } + saslToken = new byte[len]; + if (LOG.isDebugEnabled()) + LOG.debug("Will read input token of size " + saslToken.length + + " for processing by initSASLContext"); + inStream.readFully(saslToken); + } + + while (!saslClient.isComplete()) { + saslToken = saslClient.evaluateChallenge(saslToken); + if (saslToken != null) { + if (LOG.isDebugEnabled()) + LOG.debug("Will send token of size " + saslToken.length + + " from initSASLContext."); + outStream.writeInt(saslToken.length); + outStream.write(saslToken, 0, saslToken.length); + outStream.flush(); + } + if (!saslClient.isComplete()) { + readStatus(inStream); + saslToken = new byte[inStream.readInt()]; + if (LOG.isDebugEnabled()) + LOG.debug("Will read input token of size " + saslToken.length + + " for processing by initSASLContext"); + inStream.readFully(saslToken); + } + } + if (LOG.isDebugEnabled()) { + LOG.debug("SASL client context established. Negotiated QoP: " + + saslClient.getNegotiatedProperty(Sasl.QOP)); + } + return true; + } catch (IOException e) { + try { + saslClient.dispose(); + } catch (SaslException ignored) { + // ignore further exceptions during cleanup + } + throw e; + } + } + + /** + * Get a SASL wrapped InputStream. Can be called only after saslConnect() has + * been called. + * + * @param in + * the InputStream to wrap + * @return a SASL wrapped InputStream + * @throws IOException + */ + public InputStream getInputStream(InputStream in) throws IOException { + if (!saslClient.isComplete()) { + throw new IOException("Sasl authentication exchange hasn't completed yet"); + } + return new SaslInputStream(in, saslClient); + } + + /** + * Get a SASL wrapped OutputStream. Can be called only after saslConnect() has + * been called. 
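
Taken together, the intended call order on the client is: construct the client, run saslConnect(), then wrap the raw streams. A minimal usage sketch, assuming a connected `socket` and a DIGEST-MD5 `token` already in hand (serverPrincipal may be null for DIGEST):

    HBaseSaslRpcClient saslRpcClient =
        new HBaseSaslRpcClient(AuthMethod.DIGEST, token, null);
    InputStream in = socket.getInputStream();
    OutputStream out = socket.getOutputStream();
    if (saslRpcClient.saslConnect(in, out)) {
      in = saslRpcClient.getInputStream(in);    // SASL-wrapped from here on
      out = saslRpcClient.getOutputStream(out);
    } else {
      // server sent SWITCH_TO_SIMPLE_AUTH; continue with the raw streams
    }
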
+   *
+   * @param out
+   *          the OutputStream to wrap
+   * @return a SASL wrapped OutputStream
+   * @throws IOException
+   */
+  public OutputStream getOutputStream(OutputStream out) throws IOException {
+    if (!saslClient.isComplete()) {
+      throw new IOException("Sasl authentication exchange hasn't completed yet");
+    }
+    return new SaslOutputStream(out, saslClient);
+  }
+
+  /** Release resources used by wrapped saslClient */
+  public void dispose() throws SaslException {
+    saslClient.dispose();
+  }
+
+  private static class SaslClientCallbackHandler implements CallbackHandler {
+    private final String userName;
+    private final char[] userPassword;
+
+    public SaslClientCallbackHandler(Token<? extends TokenIdentifier> token) {
+      this.userName = HBaseSaslRpcServer.encodeIdentifier(token.getIdentifier());
+      this.userPassword = HBaseSaslRpcServer.encodePassword(token.getPassword());
+    }
+
+    public void handle(Callback[] callbacks)
+        throws UnsupportedCallbackException {
+      NameCallback nc = null;
+      PasswordCallback pc = null;
+      RealmCallback rc = null;
+      for (Callback callback : callbacks) {
+        if (callback instanceof RealmChoiceCallback) {
+          continue;
+        } else if (callback instanceof NameCallback) {
+          nc = (NameCallback) callback;
+        } else if (callback instanceof PasswordCallback) {
+          pc = (PasswordCallback) callback;
+        } else if (callback instanceof RealmCallback) {
+          rc = (RealmCallback) callback;
+        } else {
+          throw new UnsupportedCallbackException(callback,
+              "Unrecognized SASL client callback");
+        }
+      }
+      if (nc != null) {
+        if (LOG.isDebugEnabled())
+          LOG.debug("SASL client callback: setting username: " + userName);
+        nc.setName(userName);
+      }
+      if (pc != null) {
+        if (LOG.isDebugEnabled())
+          LOG.debug("SASL client callback: setting userPassword");
+        pc.setPassword(userPassword);
+      }
+      if (rc != null) {
+        if (LOG.isDebugEnabled())
+          LOG.debug("SASL client callback: setting realm: "
+              + rc.getDefaultText());
+        rc.setText(rc.getDefaultText());
+      }
+    }
+  }
+}
Index: src/main/java/org/apache/hadoop/hbase/security/User.java
===================================================================
--- src/main/java/org/apache/hadoop/hbase/security/User.java	(revision 1326827)
+++ src/main/java/org/apache/hadoop/hbase/security/User.java	(working copy)
@@ -1,620 +0,0 @@
-/*
- * Copyright 2010 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -package org.apache.hadoop.hbase.security; - -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; -import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; -import org.apache.hadoop.hbase.util.Methods; -import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.security.UserGroupInformation; - -import java.io.IOException; -import java.lang.reflect.Constructor; -import java.lang.reflect.UndeclaredThrowableException; -import java.security.PrivilegedAction; -import java.security.PrivilegedExceptionAction; - -import org.apache.commons.logging.Log; - -/** - * Wrapper to abstract out usage of user and group information in HBase. - * - *
<p>
- * This class provides a common interface for interacting with user and group - * information across changing APIs in different versions of Hadoop. It only - * provides access to the common set of functionality in - * {@link org.apache.hadoop.security.UserGroupInformation} currently needed by - * HBase, but can be extended as needs change. - *
</p>
- */ -@InterfaceAudience.Private -public abstract class User { - public static final String HBASE_SECURITY_CONF_KEY = - "hbase.security.authentication"; - - /** - * Flag to differentiate between API-incompatible changes to - * {@link org.apache.hadoop.security.UserGroupInformation} between vanilla - * Hadoop 0.20.x and secure Hadoop 0.20+. - */ - private static boolean IS_SECURE_HADOOP = true; - static { - try { - UserGroupInformation.class.getMethod("isSecurityEnabled"); - } catch (NoSuchMethodException nsme) { - IS_SECURE_HADOOP = false; - } - } - private static Log LOG = LogFactory.getLog(User.class); - - protected UserGroupInformation ugi; - - public UserGroupInformation getUGI() { - return ugi; - } - - /** - * Returns the full user name. For Kerberos principals this will include - * the host and realm portions of the principal name. - * @return User full name. - */ - public String getName() { - return ugi.getUserName(); - } - - /** - * Returns the list of groups of which this user is a member. On secure - * Hadoop this returns the group information for the user as resolved on the - * server. For 0.20 based Hadoop, the group names are passed from the client. - */ - public String[] getGroupNames() { - return ugi.getGroupNames(); - } - - /** - * Returns the shortened version of the user name -- the portion that maps - * to an operating system user name. - * @return Short name - */ - public abstract String getShortName(); - - /** - * Executes the given action within the context of this user. - */ - public abstract T runAs(PrivilegedAction action); - - /** - * Executes the given action within the context of this user. - */ - public abstract T runAs(PrivilegedExceptionAction action) - throws IOException, InterruptedException; - - /** - * Requests an authentication token for this user and stores it in the - * user's credentials. - * - * @throws IOException - */ - public abstract void obtainAuthTokenForJob(Configuration conf, Job job) - throws IOException, InterruptedException; - - /** - * Requests an authentication token for this user and stores it in the - * user's credentials. - * - * @throws IOException - */ - public abstract void obtainAuthTokenForJob(JobConf job) - throws IOException, InterruptedException; - - public String toString() { - return ugi.toString(); - } - - /** - * Returns the {@code User} instance within current execution context. - */ - public static User getCurrent() throws IOException { - User user; - if (IS_SECURE_HADOOP) { - user = new SecureHadoopUser(); - } else { - user = new HadoopUser(); - } - if (user.getUGI() == null) { - return null; - } - return user; - } - - /** - * Wraps an underlying {@code UserGroupInformation} instance. 
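
With User gone, callers use the UserGroupInformation and SecurityUtil equivalents directly, as the hunks above do. A rough mapping (a sketch; runAs/doAs exist in both PrivilegedAction and PrivilegedExceptionAction variants):

    User.getCurrent()                 ->  UserGroupInformation.getCurrentUser()
    user.getName()                    ->  ugi.getUserName()
    user.getShortName()               ->  ugi.getShortUserName()
    user.runAs(action)                ->  ugi.doAs(action)
    User.createUserForTesting(conf, name, groups)
                                      ->  UserGroupInformation.createUserForTesting(name, groups)
    User.login(conf, keytabKey, principalKey, host)
                                      ->  SecurityUtil.login(conf, keytabKey, principalKey, host)
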
- * @param ugi The base Hadoop user - * @return User - */ - public static User create(UserGroupInformation ugi) { - if (ugi == null) { - return null; - } - - if (IS_SECURE_HADOOP) { - return new SecureHadoopUser(ugi); - } - return new HadoopUser(ugi); - } - - public static User createUser(ConnectionHeader head) { - UserGroupInformation ugi = null; - - if (!head.hasUserInfo()) { - return create(null); - } - UserInformation userInfoProto = head.getUserInfo(); - String effectiveUser = null; - if (userInfoProto.hasEffectiveUser()) { - effectiveUser = userInfoProto.getEffectiveUser(); - } - String realUser = null; - if (userInfoProto.hasRealUser()) { - realUser = userInfoProto.getRealUser(); - } - if (effectiveUser != null) { - if (realUser != null) { - UserGroupInformation realUserUgi = - UserGroupInformation.createRemoteUser(realUser); - ugi = UserGroupInformation.createProxyUser(effectiveUser, realUserUgi); - } else { - ugi = UserGroupInformation.createRemoteUser(effectiveUser); - } - } - return create(ugi); - } - - /** - * Generates a new {@code User} instance specifically for use in test code. - * @param name the full username - * @param groups the group names to which the test user will belong - * @return a new User instance - */ - public static User createUserForTesting(Configuration conf, - String name, String[] groups) { - if (IS_SECURE_HADOOP) { - return SecureHadoopUser.createUserForTesting(conf, name, groups); - } - return HadoopUser.createUserForTesting(conf, name, groups); - } - - /** - * Log in the current process using the given configuration keys for the - * credential file and login principal. - * - *
<p><strong>
This is only applicable when - * running on secure Hadoop -- see - * org.apache.hadoop.security.SecurityUtil#login(Configuration,String,String,String). - * On regular Hadoop (without security features), this will safely be ignored. - *
</p>
- * - * @param conf The configuration data to use - * @param fileConfKey Property key used to configure path to the credential file - * @param principalConfKey Property key used to configure login principal - * @param localhost Current hostname to use in any credentials - * @throws IOException underlying exception from SecurityUtil.login() call - */ - public static void login(Configuration conf, String fileConfKey, - String principalConfKey, String localhost) throws IOException { - if (IS_SECURE_HADOOP) { - SecureHadoopUser.login(conf, fileConfKey, principalConfKey, localhost); - } else { - HadoopUser.login(conf, fileConfKey, principalConfKey, localhost); - } - } - - /** - * Returns whether or not Kerberos authentication is configured for Hadoop. - * For non-secure Hadoop, this always returns false. - * For secure Hadoop, it will return the value from - * {@code UserGroupInformation.isSecurityEnabled()}. - */ - public static boolean isSecurityEnabled() { - if (IS_SECURE_HADOOP) { - return SecureHadoopUser.isSecurityEnabled(); - } else { - return HadoopUser.isSecurityEnabled(); - } - } - - /** - * Returns whether or not secure authentication is enabled for HBase - * (whether hbase.security.authentication is set to - * kerberos. - */ - public static boolean isHBaseSecurityEnabled(Configuration conf) { - return "kerberos".equalsIgnoreCase(conf.get(HBASE_SECURITY_CONF_KEY)); - } - - /* Concrete implementations */ - - /** - * Bridges {@link User} calls to invocations of the appropriate methods - * in {@link org.apache.hadoop.security.UserGroupInformation} in regular - * Hadoop 0.20 (ASF Hadoop and other versions without the backported security - * features). - */ - private static class HadoopUser extends User { - - private HadoopUser() { - try { - ugi = (UserGroupInformation) callStatic("getCurrentUGI"); - if (ugi == null) { - // Secure Hadoop UGI will perform an implicit login if the current - // user is null. Emulate the same behavior here for consistency - Configuration conf = HBaseConfiguration.create(); - ugi = (UserGroupInformation) callStatic("login", - new Class[]{ Configuration.class }, new Object[]{ conf }); - if (ugi != null) { - callStatic("setCurrentUser", - new Class[]{ UserGroupInformation.class }, new Object[]{ ugi }); - } - } - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception HadoopUser"); - } - } - - private HadoopUser(UserGroupInformation ugi) { - this.ugi = ugi; - } - - @Override - public String getShortName() { - return ugi != null ? 
ugi.getUserName() : null; - } - - @Override - public T runAs(PrivilegedAction action) { - T result = null; - UserGroupInformation previous = null; - try { - previous = (UserGroupInformation) callStatic("getCurrentUGI"); - try { - if (ugi != null) { - callStatic("setCurrentUser", new Class[]{UserGroupInformation.class}, - new Object[]{ugi}); - } - result = action.run(); - } finally { - callStatic("setCurrentUser", new Class[]{UserGroupInformation.class}, - new Object[]{previous}); - } - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception in runAs()"); - } - return result; - } - - @Override - public T runAs(PrivilegedExceptionAction action) - throws IOException, InterruptedException { - T result = null; - try { - UserGroupInformation previous = - (UserGroupInformation) callStatic("getCurrentUGI"); - try { - if (ugi != null) { - callStatic("setCurrentUGI", new Class[]{UserGroupInformation.class}, - new Object[]{ugi}); - } - result = action.run(); - } finally { - callStatic("setCurrentUGI", new Class[]{UserGroupInformation.class}, - new Object[]{previous}); - } - } catch (Exception e) { - if (e instanceof IOException) { - throw (IOException)e; - } else if (e instanceof InterruptedException) { - throw (InterruptedException)e; - } else if (e instanceof RuntimeException) { - throw (RuntimeException)e; - } else { - throw new UndeclaredThrowableException(e, "Unknown exception in runAs()"); - } - } - return result; - } - - @Override - public void obtainAuthTokenForJob(Configuration conf, Job job) - throws IOException, InterruptedException { - // this is a no-op. token creation is only supported for kerberos - // authenticated clients - } - - @Override - public void obtainAuthTokenForJob(JobConf job) - throws IOException, InterruptedException { - // this is a no-op. token creation is only supported for kerberos - // authenticated clients - } - - /** @see User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[]) */ - public static User createUserForTesting(Configuration conf, - String name, String[] groups) { - try { - Class c = Class.forName("org.apache.hadoop.security.UnixUserGroupInformation"); - Constructor constructor = c.getConstructor(String.class, String[].class); - if (constructor == null) { - throw new NullPointerException( - ); - } - UserGroupInformation newUser = - (UserGroupInformation)constructor.newInstance(name, groups); - // set user in configuration -- hack for regular hadoop - conf.set("hadoop.job.ugi", newUser.toString()); - return new HadoopUser(newUser); - } catch (ClassNotFoundException cnfe) { - throw new RuntimeException( - "UnixUserGroupInformation not found, is this secure Hadoop?", cnfe); - } catch (NoSuchMethodException nsme) { - throw new RuntimeException( - "No valid constructor found for UnixUserGroupInformation!", nsme); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception instantiating new UnixUserGroupInformation"); - } - } - - /** - * No-op since we're running on a version of Hadoop that doesn't support - * logins. - * @see User#login(org.apache.hadoop.conf.Configuration, String, String, String) - */ - public static void login(Configuration conf, String fileConfKey, - String principalConfKey, String localhost) throws IOException { - LOG.info("Skipping login, not running on secure Hadoop"); - } - - /** Always returns {@code false}. 
*/ - public static boolean isSecurityEnabled() { - return false; - } - } - - /** - * Bridges {@code User} invocations to underlying calls to - * {@link org.apache.hadoop.security.UserGroupInformation} for secure Hadoop - * 0.20 and versions 0.21 and above. - */ - private static class SecureHadoopUser extends User { - private String shortName; - - private SecureHadoopUser() throws IOException { - try { - ugi = (UserGroupInformation) callStatic("getCurrentUser"); - } catch (IOException ioe) { - throw ioe; - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception getting current secure user"); - } - } - - private SecureHadoopUser(UserGroupInformation ugi) { - this.ugi = ugi; - } - - @Override - public String getShortName() { - if (shortName != null) return shortName; - - try { - shortName = (String)call(ugi, "getShortUserName", null, null); - return shortName; - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected error getting user short name"); - } - } - - @Override - public T runAs(PrivilegedAction action) { - try { - return (T) call(ugi, "doAs", new Class[]{PrivilegedAction.class}, - new Object[]{action}); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception in runAs()"); - } - } - - @Override - public T runAs(PrivilegedExceptionAction action) - throws IOException, InterruptedException { - try { - return (T) call(ugi, "doAs", - new Class[]{PrivilegedExceptionAction.class}, - new Object[]{action}); - } catch (IOException ioe) { - throw ioe; - } catch (InterruptedException ie) { - throw ie; - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception in runAs(PrivilegedExceptionAction)"); - } - } - - @Override - public void obtainAuthTokenForJob(Configuration conf, Job job) - throws IOException, InterruptedException { - try { - Class c = Class.forName( - "org.apache.hadoop.hbase.security.token.TokenUtil"); - Methods.call(c, null, "obtainTokenForJob", - new Class[]{Configuration.class, UserGroupInformation.class, - Job.class}, - new Object[]{conf, ugi, job}); - } catch (ClassNotFoundException cnfe) { - throw new RuntimeException("Failure loading TokenUtil class, " - +"is secure RPC available?", cnfe); - } catch (IOException ioe) { - throw ioe; - } catch (InterruptedException ie) { - throw ie; - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected error calling TokenUtil.obtainAndCacheToken()"); - } - } - - @Override - public void obtainAuthTokenForJob(JobConf job) - throws IOException, InterruptedException { - try { - Class c = Class.forName( - "org.apache.hadoop.hbase.security.token.TokenUtil"); - Methods.call(c, null, "obtainTokenForJob", - new Class[]{JobConf.class, UserGroupInformation.class}, - new Object[]{job, ugi}); - } catch (ClassNotFoundException cnfe) { - throw new RuntimeException("Failure loading TokenUtil class, " - +"is secure RPC available?", cnfe); - } catch (IOException ioe) { - throw ioe; - } catch (InterruptedException ie) { - throw ie; - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected error calling TokenUtil.obtainAndCacheToken()"); - } - } - - /** @see 
User#createUserForTesting(org.apache.hadoop.conf.Configuration, String, String[]) */ - public static User createUserForTesting(Configuration conf, - String name, String[] groups) { - try { - return new SecureHadoopUser( - (UserGroupInformation)callStatic("createUserForTesting", - new Class[]{String.class, String[].class}, - new Object[]{name, groups}) - ); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Error creating secure test user"); - } - } - - /** - * Obtain credentials for the current process using the configured - * Kerberos keytab file and principal. - * @see User#login(org.apache.hadoop.conf.Configuration, String, String, String) - * - * @param conf the Configuration to use - * @param fileConfKey Configuration property key used to store the path - * to the keytab file - * @param principalConfKey Configuration property key used to store the - * principal name to login as - * @param localhost the local hostname - */ - public static void login(Configuration conf, String fileConfKey, - String principalConfKey, String localhost) throws IOException { - if (isSecurityEnabled()) { - // check for SecurityUtil class - try { - Class c = Class.forName("org.apache.hadoop.security.SecurityUtil"); - Class[] types = new Class[]{ - Configuration.class, String.class, String.class, String.class }; - Object[] args = new Object[]{ - conf, fileConfKey, principalConfKey, localhost }; - Methods.call(c, null, "login", types, args); - } catch (ClassNotFoundException cnfe) { - throw new RuntimeException("Unable to login using " + - "org.apache.hadoop.security.SecurityUtil.login(). SecurityUtil class " + - "was not found! Is this a version of secure Hadoop?", cnfe); - } catch (IOException ioe) { - throw ioe; - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unhandled exception in User.login()"); - } - } - } - - /** - * Returns the result of {@code UserGroupInformation.isSecurityEnabled()}. 
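With the bridge gone, the call sites patched below use the secure-Hadoop APIs directly; everything SecureHadoopUser does above is a reflective wrapper around them. A sketch of the direct equivalents (conf, hostname, and the test user/group names are illustrative; the keytab/principal keys appear in the daemon hunks below, and _HOST substitution in the principal is standard SecurityUtil.login behavior):

    // runAs(...)            ->  ugi.doAs(action)
    // getShortName()        ->  ugi.getShortUserName()
    // createUserForTesting  ->  UserGroupInformation.createUserForTesting(name, groups)
    // login(...)            ->  SecurityUtil.login(conf, fileKey, principalKey, host)
    UserGroupInformation testUser =
        UserGroupInformation.createUserForTesting("testuser", new String[]{"testgroup"});
    String name = testUser.doAs(new PrivilegedExceptionAction<String>() {
      public String run() throws Exception {
        return UserGroupInformation.getCurrentUser().getShortUserName(); // "testuser"
      }
    });
    SecurityUtil.login(conf, "hbase.master.keytab.file",
        "hbase.master.kerberos.principal", hostname);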
- */ - public static boolean isSecurityEnabled() { - try { - return (Boolean)callStatic("isSecurityEnabled"); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new UndeclaredThrowableException(e, - "Unexpected exception calling UserGroupInformation.isSecurityEnabled()"); - } - } - } - - /* Reflection helper methods */ - private static Object callStatic(String methodName) throws Exception { - return call(null, methodName, null, null); - } - - private static Object callStatic(String methodName, Class[] types, - Object[] args) throws Exception { - return call(null, methodName, types, args); - } - - private static Object call(UserGroupInformation instance, String methodName, - Class[] types, Object[] args) throws Exception { - return Methods.call(UserGroupInformation.class, instance, methodName, types, - args); - } -} Index: src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java (working copy) @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.token.TokenUtil; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.FileInputFormat; @@ -34,6 +34,7 @@ import org.apache.hadoop.mapred.OutputFormat; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapred.TextOutputFormat; +import org.apache.hadoop.security.UserGroupInformation; /** * Utility for {@link TableMap} and {@link TableReduce} @@ -169,9 +170,9 @@ } public static void initCredentials(JobConf job) throws IOException { - if (User.isHBaseSecurityEnabled(job)) { + if (UserGroupInformation.isSecurityEnabled()) { try { - User.getCurrent().obtainAuthTokenForJob(job); + TokenUtil.obtainTokenForJob(job,UserGroupInformation.getCurrentUser()); } catch (InterruptedException ie) { ie.printStackTrace(); Thread.interrupted(); Index: src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java (working copy) @@ -80,7 +80,6 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; @@ -93,6 +92,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.ipc.RemoteException; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.zookeeper.KeeperException; import com.google.protobuf.ServiceException; @@ -432,9 +432,9 @@ this.properties = Collections.unmodifiableMap(m); try { - User currentUser = User.getCurrent(); + UserGroupInformation currentUser = UserGroupInformation.getCurrentUser(); if (currentUser != null) { - username = currentUser.getName(); + 
username = currentUser.getUserName(); } } catch (IOException ioe) { LOG.warn("Error obtaining current user, skipping username in HConnectionKey", Index: src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java (working copy) @@ -66,7 +66,6 @@ import org.apache.hadoop.hbase.filter.ParseFilter; import org.apache.hadoop.hbase.filter.PrefixFilter; import org.apache.hadoop.hbase.filter.WhileMatchFilter; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.thrift.CallQueue.Call; import org.apache.hadoop.hbase.thrift.generated.AlreadyExists; import org.apache.hadoop.hbase.thrift.generated.BatchMutation; @@ -84,6 +83,8 @@ import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.net.DNS; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.thrift.TException; import org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; @@ -363,11 +364,11 @@ } // login the server principal (if using secure Hadoop) - if (User.isSecurityEnabled() && User.isHBaseSecurityEnabled(conf)) { + if (UserGroupInformation.isSecurityEnabled()) { String machineName = Strings.domainNamePointerToHostName( DNS.getDefaultHost(conf.get("hbase.thrift.dns.interface", "default"), conf.get("hbase.thrift.dns.nameserver", "default"))); - User.login(conf, "hbase.thrift.keytab.file", + SecurityUtil.login(conf, "hbase.thrift.keytab.file", "hbase.thrift.kerberos.principal", machineName); } Index: src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (working copy) @@ -132,7 +132,6 @@ import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException; import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CompressionTest; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -154,6 +153,8 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.metrics.util.MBeanUtil; import org.apache.hadoop.net.DNS; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.zookeeper.KeeperException; @@ -385,8 +386,8 @@ this.startcode = System.currentTimeMillis(); // login the server principal (if using secure Hadoop) - User.login(this.conf, "hbase.regionserver.keytab.file", - "hbase.regionserver.kerberos.principal", this.isa.getHostName()); + SecurityUtil.login(conf, "hbase.regionserver.keytab.file", + "hbase.regionserver.kerberos.principal", this.isa.getHostName()); regionServerAccounting = new RegionServerAccounting(); cacheConfig = new CacheConfig(conf); } Index: src/main/java/org/apache/hadoop/hbase/master/HMaster.java =================================================================== --- 
src/main/java/org/apache/hadoop/hbase/master/HMaster.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/master/HMaster.java (working copy) @@ -95,7 +95,6 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.replication.regionserver.Replication; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.HasThread; @@ -115,6 +114,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.metrics.util.MBeanUtil; import org.apache.hadoop.net.DNS; +import org.apache.hadoop.security.SecurityUtil; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.Watcher; @@ -276,7 +276,7 @@ conf.getLong("hbase.master.buffer.for.rs.fatals", 1*1024*1024)); // initialize server principal (if using secure Hadoop) - User.login(conf, "hbase.master.keytab.file", + SecurityUtil.login(conf, "hbase.master.keytab.file", "hbase.master.kerberos.principal", this.isa.getHostName()); // set the thread name now we have an address Index: src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java (working copy) @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.token.TokenUtil; import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.ZKUtil; @@ -53,6 +53,7 @@ import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.Job; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; /** @@ -221,9 +222,10 @@ } public static void initCredentials(Job job) throws IOException { - if (User.isHBaseSecurityEnabled(job.getConfiguration())) { + if (UserGroupInformation.isSecurityEnabled()) { try { - User.getCurrent().obtainAuthTokenForJob(job.getConfiguration(), job); + TokenUtil.obtainTokenForJob(job.getConfiguration(), + UserGroupInformation.getCurrentUser(), job); } catch (InterruptedException ie) { LOG.info("Interrupted obtaining user authentication token"); Thread.interrupted(); Index: src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java (working copy) @@ -46,7 +46,7 @@ import org.apache.hadoop.io.*; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.hbase.ipc.VersionedProtocol; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.*; @@ -129,13 +129,13 @@ private static class Invoker implements InvocationHandler { private Class protocol; private InetSocketAddress address; - private User ticket; + private 
UserGroupInformation ticket; private HBaseClient client; private boolean isClosed = false; final private int rpcTimeout; public Invoker(Class protocol, - InetSocketAddress address, User ticket, + InetSocketAddress address, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int rpcTimeout) { this.protocol = protocol; this.address = address; @@ -176,7 +176,7 @@ * talking to a server at the named address. */ public VersionedProtocol getProxy( Class protocol, long clientVersion, - InetSocketAddress addr, User ticket, + InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int rpcTimeout) throws IOException { @@ -210,7 +210,7 @@ public Object[] call(Method method, Object[][] params, InetSocketAddress[] addrs, Class protocol, - User ticket, Configuration conf) + UserGroupInformation ticket, Configuration conf) throws IOException, InterruptedException { Invocation[] invocations = new Invocation[params.length]; Index: src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java (working copy) @@ -1,77 +0,0 @@ -/* - * Copyright 2010 The Apache Software Foundation - * - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.ipc; - -import java.io.DataInput; -import java.io.DataOutput; -import java.io.IOException; - -import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.Writable; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.security.User; - -/** - * The IPC connection header sent by the client to the server - * on connection establishment. - */ -@InterfaceAudience.Private -class ConnectionHeader implements Writable { - protected String protocol; - - public ConnectionHeader() {} - - /** - * Create a new {@link ConnectionHeader} with the given protocol - * and {@link User}. - * @param protocol protocol used for communication between the IPC client - * and the server - * @param user {@link User} of the client communicating with - * the server - */ - public ConnectionHeader(String protocol, User user) { - this.protocol = protocol; - } - - @Override - public void readFields(DataInput in) throws IOException { - protocol = Text.readString(in); - if (protocol.isEmpty()) { - protocol = null; - } - } - - @Override - public void write(DataOutput out) throws IOException { - Text.writeString(out, (protocol == null) ? 
"" : protocol); - } - - public String getProtocol() { - return protocol; - } - - public User getUser() { - return null; - } - - public String toString() { - return protocol; - } -} Index: src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java (working copy) @@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.ipc; +import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -34,6 +36,7 @@ import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.CancelledKeyException; +import java.nio.channels.Channels; import java.nio.channels.ClosedChannelException; import java.nio.channels.ReadableByteChannel; import java.nio.channels.SelectionKey; @@ -41,6 +44,7 @@ import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.WritableByteChannel; +import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -55,6 +59,10 @@ import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; +import javax.security.sasl.Sasl; +import javax.security.sasl.SaslException; +import javax.security.sasl.SaslServer; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -67,14 +75,33 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequest; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcException; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler; import org.apache.hadoop.hbase.monitoring.TaskMonitor; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslDigestCallbackHandler; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandler; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslStatus; import org.apache.hadoop.hbase.util.ByteBufferOutputStream; import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.DataOutputBuffer; +import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Writable; +import org.apache.hadoop.io.WritableUtils; +import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.RPC.VersionMismatch; +import org.apache.hadoop.security.AccessControlException; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; +import org.apache.hadoop.security.authorize.AuthorizationException; +import org.apache.hadoop.security.authorize.ProxyUsers; +import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; +import org.apache.hadoop.security.token.SecretManager; +import org.apache.hadoop.security.token.TokenIdentifier; +import 
org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; @@ -95,7 +122,8 @@ */ @InterfaceAudience.Private public abstract class HBaseServer implements RpcServer { - + private final boolean authorize; + private boolean isSecurityEnabled; /** * The first four bytes of Hadoop RPC connections */ @@ -128,6 +156,13 @@ LogFactory.getLog("org.apache.hadoop.ipc.HBaseServer"); protected static final Log TRACELOG = LogFactory.getLog("org.apache.hadoop.ipc.HBaseServer.trace"); + + private static final String AUTH_FAILED_FOR = "Auth failed for "; + private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; + private static final Log AUDITLOG = + LogFactory.getLog("SecurityLogger."+Server.class.getName()); + protected SecretManager secretManager; + protected ServiceAuthorizationManager authManager; protected static final ThreadLocal SERVER = new ThreadLocal(); @@ -300,9 +335,6 @@ protected synchronized void setResponse(Object value, Status status, String errorClass, String error) { - // Avoid overwriting an error value in the response. This can happen if - // endDelayThrowing is called by another thread before the actual call - // returning. if (this.isError) return; if (errorClass != null) { @@ -323,8 +355,7 @@ if (result instanceof WritableWithSize) { // get the size hint. WritableWithSize ohint = (WritableWithSize) result; - long hint = ohint.getWritableSize() + Bytes.SIZEOF_BYTE + - (2 * Bytes.SIZEOF_INT); + long hint = ohint.getWritableSize() + Bytes.SIZEOF_INT + Bytes.SIZEOF_INT; if (hint > Integer.MAX_VALUE) { // oops, new problem. IOException ioe = @@ -342,7 +373,7 @@ RpcResponse.Builder builder = RpcResponse.newBuilder(); // Call id. builder.setCallId(this.id); - builder.setError(error != null); + builder.setStatus(status); if (error != null) { RpcException.Builder b = RpcException.newBuilder(); b.setExceptionName(errorClass); @@ -356,14 +387,37 @@ } builder.build().writeDelimitedTo( DataOutputOutputStream.constructOutputStream(out)); + if (connection.useWrap) { + wrapWithSasl(buf); + } } catch (IOException e) { LOG.warn("Exception while creating response " + e); } - ByteBuffer bb = buf.getByteBuffer(); - bb.position(0); - this.response = bb; + this.response = buf.getByteBuffer(); } + private void wrapWithSasl(ByteBufferOutputStream response) + throws IOException { + if (connection.useSasl) { + // getByteBuffer calls flip() + ByteBuffer buf = response.getByteBuffer(); + byte[] token; + // synchronization may be needed since there can be multiple Handler + // threads using saslServer to wrap responses. + synchronized (connection.saslServer) { + token = connection.saslServer.wrap(buf.array(), + buf.arrayOffset(), buf.remaining()); + } + if (LOG.isDebugEnabled()) + LOG.debug("Adding saslServer wrapped token of size " + token.length + + " as call response."); + buf.clear(); + DataOutputStream saslOut = new DataOutputStream(response); + saslOut.writeInt(token.length); + saslOut.write(token, 0, token.length); + } + } + @Override public synchronized void endDelay(Object result) throws IOException { assert this.delayResponse; @@ -1041,8 +1095,8 @@ } /** Reads calls from a connection and queues them for handling.
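The wrapWithSasl() method added above frames each protected response as a four-byte length followed by the SASL-wrapped bytes. The receiving side has to mirror that framing before handing bytes to the RPC parser; a hypothetical reader sketch (helper name and shape are illustrative, not part of the patch):

    // Read one length-prefixed frame produced by wrapWithSasl() and unwrap it.
    static byte[] readWrappedFrame(java.io.DataInputStream in,
        javax.security.sasl.SaslClient sasl) throws java.io.IOException {
      int len = in.readInt();            // length written by saslOut.writeInt(...)
      byte[] token = new byte[len];
      in.readFully(token);
      return sasl.unwrap(token, 0, len); // back to plaintext RPC bytes
    }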
*/ - protected class Connection { - private boolean versionRead = false; //if initial signature and + public class Connection { + private boolean rpcHeaderRead = false; //if initial signature and //version are read private boolean headerRead = false; //if the connection header that //follows version is read. @@ -1053,6 +1107,7 @@ private volatile int rpcCount = 0; // number of outstanding rpcs private long lastContact; private int dataLength; + private InetAddress addr; protected Socket socket; // Cache the remote host & port info so that even if the socket is // disconnected, we can say where it used to connect to. @@ -1060,7 +1115,28 @@ protected int remotePort; ConnectionHeader header; Class protocol; - protected User user = null; + protected UserGroupInformation user = null; + private AuthMethod authMethod; + private boolean saslContextEstablished; + private boolean skipInitialSaslHandshake; + private ByteBuffer rpcHeaderBuffer; + private ByteBuffer unwrappedData; + private ByteBuffer unwrappedDataLengthBuffer; + boolean useSasl; + SaslServer saslServer; + private boolean useWrap = false; + // Fake 'call' for failed authorization response + private final int AUTHROIZATION_FAILED_CALLID = -1; + private final Call authFailedCall = + new Call(AUTHROIZATION_FAILED_CALLID, null, this, null, 0); + private ByteArrayOutputStream authFailedResponse = + new ByteArrayOutputStream(); + // Fake 'call' for SASL context setup + private static final int SASL_CALLID = -33; + private final Call saslCall = new Call(SASL_CALLID, null, this, null, 0); + private final ByteArrayOutputStream saslResponse = + new ByteArrayOutputStream(); + public UserGroupInformation attemptingUser = null; // user name before auth public Connection(SocketChannel channel, long lastContact) { this.channel = channel; @@ -1095,6 +1171,10 @@ return hostAddress; } + public InetAddress getHostInetAddress() { + return addr; + } + public int getRemotePort() { return remotePort; } @@ -1126,39 +1206,203 @@ return isIdle() && currentTime - lastContact > maxIdleTime; } + private UserGroupInformation getAuthorizedUgi(String authorizedId) + throws IOException { + if (authMethod == AuthMethod.DIGEST) { + TokenIdentifier tokenId = HBaseSaslRpcServer.getIdentifier(authorizedId, + secretManager); + UserGroupInformation ugi = tokenId.getUser(); + if (ugi == null) { + throw new AccessControlException( + "Can't retrieve username from tokenIdentifier."); + } + ugi.addTokenIdentifier(tokenId); + return ugi; + } else { + return UserGroupInformation.createRemoteUser(authorizedId); + } + } + + private void saslReadAndProcess(byte[] saslToken) throws IOException, + InterruptedException { + if (!saslContextEstablished) { + byte[] replyToken = null; + try { + if (saslServer == null) { + switch (authMethod) { + case DIGEST: + if (secretManager == null) { + throw new AccessControlException( + "Server is not configured to do DIGEST authentication."); + } + saslServer = Sasl.createSaslServer(AuthMethod.DIGEST + .getMechanismName(), null, HBaseSaslRpcServer.SASL_DEFAULT_REALM, + HBaseSaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler( + secretManager, this)); + break; + default: + UserGroupInformation current = UserGroupInformation + .getCurrentUser(); + String fullName = current.getUserName(); + if (LOG.isDebugEnabled()) + LOG.debug("Kerberos principal name is " + fullName); + final String names[] = HBaseSaslRpcServer.splitKerberosName(fullName); + if (names.length != 3) { + throw new AccessControlException( + "Kerberos principal name does NOT have the 
expected " + + "hostname part: " + fullName); + } + current.doAs(new PrivilegedExceptionAction() { + @Override + public Object run() throws SaslException { + saslServer = Sasl.createSaslServer(AuthMethod.KERBEROS + .getMechanismName(), names[0], names[1], + HBaseSaslRpcServer.SASL_PROPS, new SaslGssCallbackHandler()); + return null; + } + }); + } + if (saslServer == null) + throw new AccessControlException( + "Unable to find SASL server implementation for " + + authMethod.getMechanismName()); + if (LOG.isDebugEnabled()) + LOG.debug("Created SASL server with mechanism = " + + authMethod.getMechanismName()); + } + if (LOG.isDebugEnabled()) + LOG.debug("Have read input token of size " + saslToken.length + + " for processing by saslServer.evaluateResponse()"); + replyToken = saslServer.evaluateResponse(saslToken); + } catch (IOException e) { + IOException sendToClient = e; + Throwable cause = e; + while (cause != null) { + if (cause instanceof InvalidToken) { + sendToClient = (InvalidToken) cause; + break; + } + cause = cause.getCause(); + } + doSaslReply(SaslStatus.ERROR, null, sendToClient.getClass().getName(), + sendToClient.getLocalizedMessage()); + rpcMetrics.authenticationFailures.inc(); + String clientIP = this.toString(); + // attempting user could be null + AUDITLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser); + throw e; + } + if (replyToken != null) { + if (LOG.isDebugEnabled()) + LOG.debug("Will send token of size " + replyToken.length + + " from saslServer."); + doSaslReply(SaslStatus.SUCCESS, new BytesWritable(replyToken), null, + null); + } + if (saslServer.isComplete()) { + LOG.debug("SASL server context established. Negotiated QoP is " + + saslServer.getNegotiatedProperty(Sasl.QOP)); + String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP); + useWrap = qop != null && !"auth".equalsIgnoreCase(qop); + user = getAuthorizedUgi(saslServer.getAuthorizationID()); + LOG.debug("SASL server successfully authenticated client: " + user); + rpcMetrics.authenticationSuccesses.inc(); + AUDITLOG.trace(AUTH_SUCCESSFUL_FOR + user); + saslContextEstablished = true; + } + } else { + if (LOG.isDebugEnabled()) + LOG.debug("Have read input token of size " + saslToken.length + + " for processing by saslServer.unwrap()"); + + if (!useWrap) { + processOneRpc(saslToken); + } else { + byte[] plaintextData = saslServer.unwrap(saslToken, 0, + saslToken.length); + processUnwrappedData(plaintextData); + } + } + } + + private void doSaslReply(SaslStatus status, Writable rv, + String errorClass, String error) throws IOException { + saslCall.setResponse(rv, + status == SaslStatus.SUCCESS ? Status.SUCCESS : Status.ERROR, + errorClass, error); + saslCall.responder = responder; + saslCall.sendResponseIfReady(); + } + + private void disposeSasl() { + if (saslServer != null) { + try { + saslServer.dispose(); + } catch (SaslException ignored) { + } + } + } + public int readAndProcess() throws IOException, InterruptedException { while (true) { /* Read at most one RPC. If the header is not read completely yet * then iterate until we read first RPC or until there is no data left. */ - int count; + int count = -1; if (dataLengthBuffer.remaining() > 0) { count = channelRead(channel, dataLengthBuffer); if (count < 0 || dataLengthBuffer.remaining() > 0) return count; } - if (!versionRead) { + if (!rpcHeaderRead) { //Every connection is expected to send the header. 
- ByteBuffer versionBuffer = ByteBuffer.allocate(1); - count = channelRead(channel, versionBuffer); - if (count <= 0) { + if (rpcHeaderBuffer == null) { + rpcHeaderBuffer = ByteBuffer.allocate(2); + } + count = channelRead(channel, rpcHeaderBuffer); + if (count < 0 || rpcHeaderBuffer.remaining() > 0) { return count; } - int version = versionBuffer.get(0); - + int version = rpcHeaderBuffer.get(0); + byte[] method = new byte[] {rpcHeaderBuffer.get(1)}; + authMethod = AuthMethod.read(new DataInputStream( + new ByteArrayInputStream(method))); dataLengthBuffer.flip(); if (!HEADER.equals(dataLengthBuffer) || version != CURRENT_VERSION) { - //Warning is ok since this is not supposed to happen. - LOG.warn("Incorrect header or version mismatch from " + - hostAddress + ":" + remotePort + - " got version " + version + - " expected version " + CURRENT_VERSION); - setupBadVersionResponse(version); + LOG.warn("Incorrect header or version mismatch from " + + hostAddress + ":" + remotePort + + " got version " + version + + " expected version " + CURRENT_VERSION); return -1; } dataLengthBuffer.clear(); - versionRead = true; + if (authMethod == null) { + throw new IOException("Unable to read authentication method"); + } + if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) { + AccessControlException ae = new AccessControlException( + "Authentication is required"); + setupResponse(authFailedResponse, authFailedCall, Status.FATAL, + null, ae.getClass().getName(), ae.getMessage()); + responder.doRespond(authFailedCall); + throw ae; + } + if (!isSecurityEnabled && authMethod != AuthMethod.SIMPLE) { + doSaslReply(SaslStatus.SUCCESS, new IntWritable( + HBaseSaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null); + authMethod = AuthMethod.SIMPLE; + // client has already sent the initial Sasl message and we + // should ignore it. Both client and server should fall back + // to simple auth from now on. + skipInitialSaslHandshake = true; + } + if (authMethod != AuthMethod.SIMPLE) { + useSasl = true; + } + + rpcHeaderBuffer = null; + rpcHeaderRead = true; continue; } @@ -1167,9 +1411,15 @@ dataLength = dataLengthBuffer.getInt(); if (dataLength == HBaseClient.PING_CALL_ID) { - dataLengthBuffer.clear(); - return 0; //ping message + if(!useWrap) { //covers the !useSasl too + dataLengthBuffer.clear(); + return 0; //ping message + } } + if (dataLength < 0) { + LOG.warn("Unexpected data length " + dataLength + "!! 
from " + + getHostAddress()); + } data = ByteBuffer.allocate(dataLength); incRpcCount(); // Increment the rpc count } @@ -1179,15 +1429,21 @@ if (data.remaining() == 0) { dataLengthBuffer.clear(); data.flip(); - if (headerRead) { - processData(data.array()); + if (skipInitialSaslHandshake) { data = null; - return count; + skipInitialSaslHandshake = false; + continue; } - processHeader(); - headerRead = true; + boolean isHeaderRead = headerRead; + if (useSasl) { + saslReadAndProcess(data.array()); + } else { + processOneRpc(data.array()); + } data = null; - continue; + if (!isHeaderRead) { + continue; + } } return count; } @@ -1223,18 +1479,106 @@ } /// Reads the connection header following version - private void processHeader() throws IOException { - header = ConnectionHeader.parseFrom(new ByteArrayInputStream(data.array())); + private void processHeader(byte[] buf) throws IOException { + DataInputStream in = + new DataInputStream(new ByteArrayInputStream(buf)); + header = ConnectionHeader.parseFrom(in); try { String protocolClassName = header.getProtocol(); - protocol = getProtocolClass(protocolClassName, conf); + if (protocolClassName != null) { + protocol = getProtocolClass(header.getProtocol(), conf); + } } catch (ClassNotFoundException cnfe) { throw new IOException("Unknown protocol: " + header.getProtocol()); } - user = User.createUser(header); + UserGroupInformation protocolUser = createUser(header); + if (!useSasl) { + user = protocolUser; + if (user != null) { + user.setAuthenticationMethod(AuthMethod.SIMPLE.authenticationMethod); + } + } else { + // user is authenticated + user.setAuthenticationMethod(authMethod.authenticationMethod); + //Now we check if this is a proxy user case. If the protocol user is + //different from the 'user', it is a proxy user scenario. However, + //this is not allowed if user authenticated with DIGEST. + if ((protocolUser != null) + && (!protocolUser.getUserName().equals(user.getUserName()))) { + if (authMethod == AuthMethod.DIGEST) { + // Not allowed to doAs if token authentication is used + throw new AccessControlException("Authenticated user (" + user + + ") doesn't match what the client claims to be (" + + protocolUser + ")"); + } else { + // Effective user can be different from authenticated user + // for simple auth or kerberos auth + // The user is the real user. Now we create a proxy user + UserGroupInformation realUser = user; + user = UserGroupInformation.createProxyUser(protocolUser + .getUserName(), realUser); + // Now the user is a proxy user, set Authentication method Proxy. 
+ user.setAuthenticationMethod(AuthenticationMethod.PROXY); + } + } + } } + private void processUnwrappedData(byte[] inBuf) throws IOException, + InterruptedException { + ReadableByteChannel ch = Channels.newChannel(new ByteArrayInputStream( + inBuf)); + // Read all RPCs contained in the inBuf, even partial ones + while (true) { + int count = -1; + if (unwrappedDataLengthBuffer.remaining() > 0) { + count = channelRead(ch, unwrappedDataLengthBuffer); + if (count <= 0 || unwrappedDataLengthBuffer.remaining() > 0) + return; + } + + if (unwrappedData == null) { + unwrappedDataLengthBuffer.flip(); + int unwrappedDataLength = unwrappedDataLengthBuffer.getInt(); + + if (unwrappedDataLength == HBaseClient.PING_CALL_ID) { + if (LOG.isDebugEnabled()) + LOG.debug("Received ping message"); + unwrappedDataLengthBuffer.clear(); + continue; // ping message + } + unwrappedData = ByteBuffer.allocate(unwrappedDataLength); + } + + count = channelRead(ch, unwrappedData); + if (count <= 0 || unwrappedData.remaining() > 0) + return; + + if (unwrappedData.remaining() == 0) { + unwrappedDataLengthBuffer.clear(); + unwrappedData.flip(); + processOneRpc(unwrappedData.array()); + unwrappedData = null; + } + } + } + + private void processOneRpc(byte[] buf) throws IOException, + InterruptedException { + if (headerRead) { + processData(buf); + } else { + processHeader(buf); + headerRead = true; + if (!authorizeConnection()) { + throw new AccessControlException("Connection from " + this + + " for protocol " + header.getProtocol() + + " is unauthorized for user " + user); + } + } + } + protected void processData(byte[] buf) throws IOException, InterruptedException { RpcRequest request = RpcRequest.parseFrom(buf); int id = request.getCallId(); @@ -1287,6 +1631,32 @@ updateCallQueueLenMetrics(callQueue); } } + + private boolean authorizeConnection() throws IOException { + try { + // If auth method is DIGEST, the token was obtained by the + // real user for the effective user, therefore not required to + // authorize real user. 
doAs is allowed only for simple or kerberos + // authentication + if (user != null && user.getRealUser() != null + && (authMethod != AuthMethod.DIGEST)) { + ProxyUsers.authorize(user, this.getHostAddress(), conf); + } + authorize(user, header, getHostInetAddress()); + if (LOG.isDebugEnabled()) { + LOG.debug("Successfully authorized " + header); + } + rpcMetrics.authorizationSuccesses.inc(); + } catch (AuthorizationException ae) { + LOG.debug("Connection authorization failed: "+ae.getMessage(), ae); + rpcMetrics.authorizationFailures.inc(); + setupResponse(authFailedResponse, authFailedCall, Status.FATAL, null, + ae.getClass().getName(), ae.getMessage()); + responder.doRespond(authFailedCall); + return false; + } + return true; + } protected synchronized void close() { data = null; @@ -1299,6 +1669,33 @@ } try {socket.close();} catch(Exception ignored) {} } + + private UserGroupInformation createUser(ConnectionHeader head) { + UserGroupInformation ugi = null; + + if (!head.hasUserInfo()) { + return null; + } + UserInformation userInfoProto = head.getUserInfo(); + String effectiveUser = null; + if (userInfoProto.hasEffectiveUser()) { + effectiveUser = userInfoProto.getEffectiveUser(); + } + String realUser = null; + if (userInfoProto.hasRealUser()) { + realUser = userInfoProto.getRealUser(); + } + if (effectiveUser != null) { + if (realUser != null) { + UserGroupInformation realUserUgi = + UserGroupInformation.createRemoteUser(realUser); + ugi = UserGroupInformation.createProxyUser(effectiveUser, realUserUgi); + } else { + ugi = UserGroupInformation.createRemoteUser(effectiveUser); + } + } + return ugi; + } } /** @@ -1362,9 +1759,10 @@ throw new ServerNotRunningYetException("Server is not running yet"); if (LOG.isDebugEnabled()) { - User remoteUser = call.connection.user; + UserGroupInformation remoteUser = call.connection.user; LOG.debug(getName() + ": call #" + call.id + " executing as " - + (remoteUser == null ? "NULL principal" : remoteUser.getName())); + + (remoteUser == null ? "NULL principal" : + remoteUser.getUserName())); } RequestContext.set(call.connection.user, getRemoteIp(), @@ -1502,6 +1900,13 @@ // Create the responder here responder = new Responder(); + this.authorize = + conf.getBoolean(HADOOP_SECURITY_AUTHORIZATION, false); + this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled(); + LOG.debug("security enabled="+isSecurityEnabled); + if (isSecurityEnabled) { + HBaseSaslRpcServer.init(conf); + } } /** @@ -1557,6 +1962,10 @@ rpcMetrics.numOpenConnections.set(numConnections); } + Configuration getConf() { + return conf; + } + /** Sets the socket buffer size used for responding to RPCs. * @param size send size */ @@ -1601,7 +2010,15 @@ } } } + + public SecretManager getSecretManager() { + return this.secretManager; + } + public void setSecretManager(SecretManager secretManager) { + this.secretManager = (SecretManager) secretManager; + } + /** Stops the service. No new calls will be handled after this is called. */ @Override public synchronized void stop() { @@ -1666,6 +2083,31 @@ public HBaseRpcMetrics getRpcMetrics() { return rpcMetrics; } + + /** + * Authorize the incoming client connection. 
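authorizeConnection() above layers two distinct checks. ProxyUsers.authorize() asks whether the authenticated real user may impersonate the claimed effective user (skipped for DIGEST, since a delegation token already names its effective user), and then authorize() consults the protocol ACL, which is active only when hadoop.security.authorization is true. An impersonation sketch with illustrative principal names; remoteAddress and conf are as in the server code:

    // "rpcsvc" authenticated (e.g. via Kerberos) and claims to act as "alice".
    UserGroupInformation realUser = UserGroupInformation.createRemoteUser("rpcsvc");
    UserGroupInformation effective =
        UserGroupInformation.createProxyUser("alice", realUser);
    // Throws AuthorizationException unless conf's hadoop.proxyuser.rpcsvc.*
    // rules allow this client address to impersonate alice.
    ProxyUsers.authorize(effective, remoteAddress, conf);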
+ * + * @param user client user + * @param connection incoming connection + * @param addr InetAddress of incoming connection + * @throws org.apache.hadoop.security.authorize.AuthorizationException when the client isn't authorized to talk the protocol + */ + public void authorize(UserGroupInformation user, + ConnectionHeader connection, + InetAddress addr + ) throws AuthorizationException { + if (authorize) { + Class protocol = null; + try { + protocol = getProtocolClass(connection.getProtocol(), getConf()); + } catch (ClassNotFoundException cfne) { + throw new AuthorizationException("Unknown protocol: " + + connection.getProtocol()); + } + authManager.authorize(user != null ? user : null, + protocol, getConf(), addr); + } + } /** * When the read or write buffer size is larger than this limit, i/o will be Index: src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java (working copy) @@ -25,7 +25,7 @@ import javax.net.SocketFactory; import org.apache.hadoop.hbase.ipc.VersionedProtocol; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -36,7 +36,7 @@ /** Construct a client-side proxy object. */ VersionedProtocol getProxy(Class protocol, long clientVersion, InetSocketAddress addr, - User ticket, Configuration conf, + UserGroupInformation ticket, Configuration conf, SocketFactory factory, int rpcTimeout) throws IOException; /** Stop this proxy. */ @@ -45,7 +45,7 @@ /** Expert: Make multiple, parallel calls to a set of servers. */ Object[] call(Method method, Object[][] params, InetSocketAddress[] addrs, Class protocol, - User ticket, Configuration conf) + UserGroupInformation ticket, Configuration conf) throws IOException, InterruptedException; /** Construct a server for a protocol implementation instance. 
*/ Index: src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java (working copy) @@ -28,13 +28,18 @@ import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketException; import java.net.SocketTimeoutException; import java.net.UnknownHostException; +import java.security.PrivilegedExceptionAction; +import java.util.HashMap; import java.util.Iterator; +import java.util.Map; +import java.util.Random; import java.util.Map.Entry; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicBoolean; @@ -48,18 +53,34 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse.Status; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcRequest; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RpcResponse; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation; +import org.apache.hadoop.hbase.security.HBaseSaslRpcClient; +import org.apache.hadoop.hbase.security.KerberosInfo; +import org.apache.hadoop.hbase.security.TokenInfo; +import org.apache.hadoop.hbase.security.HBaseSaslRpcServer.AuthMethod; +import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier; +import org.apache.hadoop.hbase.security.token.AuthenticationTokenSelector; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.PoolMap; import org.apache.hadoop.hbase.util.PoolMap.PoolType; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.hbase.io.DataOutputOutputStream; +import org.apache.hadoop.hbase.ipc.HBaseClient.Connection.PingInputStream; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; +import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.security.token.TokenSelector; import org.apache.hadoop.util.ReflectionUtils; import com.google.protobuf.ByteString; @@ -213,7 +234,12 @@ return this.startTime; } } - + protected static Map> tokenHandlers = + new HashMap>(); + static { + tokenHandlers.put(AuthenticationTokenIdentifier.AUTH_TOKEN_TYPE.toString(), + new AuthenticationTokenSelector()); + } /** Thread that reads responses and notifies callers. Each connection owns a * socket connected to a remote address. Calls are multiplexed through this * socket: responses may be delivered out of order. 
*/ @@ -223,7 +249,14 @@ protected Socket socket = null; // connected socket protected DataInputStream in; protected DataOutputStream out; - + private InetSocketAddress server; // server ip:port + private String serverPrincipal; // server's krb5 principal name + private AuthMethod authMethod; // authentication method + private boolean useSasl; + private Token token; + private HBaseSaslRpcClient saslRpcClient; + private int reloginMaxBackoff; // max pause before relogin on sasl failure + // currently active calls protected final ConcurrentSkipListMap calls = new ConcurrentSkipListMap(); protected final AtomicLong lastActivity = new AtomicLong();// last I/O activity time @@ -235,20 +268,89 @@ throw new UnknownHostException("unknown host: " + remoteId.getAddress().getHostName()); } + this.server = remoteId.getAddress(); + + UserGroupInformation ticket = remoteId.getTicket(); + Class protocol = remoteId.getProtocol(); + this.useSasl = UserGroupInformation.isSecurityEnabled(); + if (useSasl && protocol != null) { + TokenInfo tokenInfo = protocol.getAnnotation(TokenInfo.class); + if (tokenInfo != null) { + TokenSelector tokenSelector = + tokenHandlers.get(tokenInfo.value()); + if (tokenSelector != null) { + token = tokenSelector.selectToken(new Text(clusterId), + ticket.getTokens()); + } else if (LOG.isDebugEnabled()) { + LOG.debug("No token selector found for type "+tokenInfo.value()); + } + } + KerberosInfo krbInfo = protocol.getAnnotation(KerberosInfo.class); + if (krbInfo != null) { + String serverKey = krbInfo.serverPrincipal(); + if (serverKey == null) { + throw new IOException( + "Can't obtain server Kerberos config key from KerberosInfo"); + } + serverPrincipal = SecurityUtil.getServerPrincipal( + conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase()); + if (LOG.isDebugEnabled()) { + LOG.debug("RPC Server Kerberos principal name for protocol=" + + protocol.getCanonicalName() + " is " + serverPrincipal); + } + } + } + + if (!useSasl) { + authMethod = AuthMethod.SIMPLE; + } else if (token != null) { + authMethod = AuthMethod.DIGEST; + } else { + authMethod = AuthMethod.KERBEROS; + } + + if (LOG.isDebugEnabled()) + LOG.debug("Use " + authMethod + " authentication for protocol " + + protocol.getSimpleName()); + + reloginMaxBackoff = conf.getInt("hbase.security.relogin.maxbackoff", 5000); this.remoteId = remoteId; - User ticket = remoteId.getTicket(); - Class protocol = remoteId.getProtocol(); ConnectionHeader.Builder builder = ConnectionHeader.newBuilder(); builder.setProtocol(protocol == null ? "" : protocol.getName()); + UserInformation userInfoPB; + if ((userInfoPB = getUserInfoPB(ticket)) != null) { + builder.setUserInfo(userInfoPB); + } this.header = builder.build(); this.setName("IPC Client (" + socketFactory.hashCode() +") connection to " + remoteId.getAddress().toString() + - ((ticket==null)?" from an unknown user": (" from " + ticket.getName()))); + ((ticket==null)?" 
from an unknown user": (" from " + + ticket.getUserName()))); this.setDaemon(true); } + private UserInformation getUserInfoPB(UserGroupInformation ugi) { + UserInformation.Builder userInfoPB = UserInformation.newBuilder(); + if (ugi == null) { + return userInfoPB.build(); + } + if (ugi != null) { + if (authMethod == AuthMethod.KERBEROS) { + // Send effective user for Kerberos auth + userInfoPB.setEffectiveUser(ugi.getUserName()); + } else if (authMethod == AuthMethod.SIMPLE) { + //Send both effective user and real user for simple auth + userInfoPB.setEffectiveUser(ugi.getUserName()); + if (ugi.getRealUser() != null) { + userInfoPB.setRealUser(ugi.getRealUser().getUserName()); + } + } + } + return userInfoPB.build(); + } + /** Update lastActivity with the current time. */ protected void touch() { lastActivity.set(System.currentTimeMillis()); @@ -352,42 +454,6 @@ } } - /** Connect to the server and set up the I/O streams. It then sends - * a header to the server and starts - * the connection thread that waits for responses. - * @throws java.io.IOException e - */ - protected synchronized void setupIOstreams() - throws IOException, InterruptedException { - - if (socket != null || shouldCloseConnection.get()) { - return; - } - - try { - if (LOG.isDebugEnabled()) { - LOG.debug("Connecting to "+remoteId); - } - setupConnection(); - this.in = new DataInputStream(new BufferedInputStream - (new PingInputStream(NetUtils.getInputStream(socket)))); - this.out = new DataOutputStream - (new BufferedOutputStream(NetUtils.getOutputStream(socket))); - writeHeader(); - - // update last activity time - touch(); - - // start the receiver thread after the socket connection has been set up - start(); - } catch (IOException e) { - markClosed(e); - close(); - - throw e; - } - } - protected void closeConnection() { // close the current connection if (socket != null) { @@ -437,16 +503,6 @@ " time(s)."); } - /* Write the header for each connection - * Out is not synchronized because only the first thread does this. - */ - private void writeHeader() throws IOException { - out.write(HBaseServer.HEADER.array()); - out.write(HBaseServer.CURRENT_VERSION); - out.writeInt(header.getSerializedSize()); - header.writeTo(out); - } - /* wait till someone signals us to start reading RPC response or * it is idle too long, it is marked as to be closed, * or the client is marked as not running. @@ -518,7 +574,231 @@ LOG.debug(getName() + ": stopped, remaining connections " + connections.size()); } + + private synchronized void disposeSasl() { + if (saslRpcClient != null) { + try { + saslRpcClient.dispose(); + saslRpcClient = null; + } catch (IOException ioe) { + LOG.info("Error disposing of SASL client", ioe); + } + } + } + private synchronized boolean shouldAuthenticateOverKrb() throws IOException { + UserGroupInformation loginUser = UserGroupInformation.getLoginUser(); + UserGroupInformation currentUser = + UserGroupInformation.getCurrentUser(); + UserGroupInformation realUser = currentUser.getRealUser(); + return authMethod == AuthMethod.KERBEROS && + loginUser != null && + //Make sure user logged in using Kerberos either keytab or TGT + loginUser.hasKerberosCredentials() && + // relogin only in case it is the login user (e.g. JT) + // or superuser (like oozie). 
+ (loginUser.equals(currentUser) || loginUser.equals(realUser)); + } + + private synchronized boolean setupSaslConnection(final InputStream in2, + final OutputStream out2) + throws IOException { + saslRpcClient = new HBaseSaslRpcClient(authMethod, token, serverPrincipal); + return saslRpcClient.saslConnect(in2, out2); + } + + /** + * If multiple clients with the same principal try to connect + * to the same server at the same time, the server assumes a + * replay attack is in progress. This is a feature of kerberos. + * In order to work around this, what is done is that the client + * backs off randomly and tries to initiate the connection + * again. + * The other problem is to do with ticket expiry. To handle that, + * a relogin is attempted. + */ + private synchronized void handleSaslConnectionFailure( + final int currRetries, + final int maxRetries, final Exception ex, final Random rand, + final UserGroupInformation user) + throws IOException, InterruptedException{ + user.doAs(new PrivilegedExceptionAction() { + public Object run() throws IOException, InterruptedException { + closeConnection(); + if (shouldAuthenticateOverKrb()) { + if (currRetries < maxRetries) { + LOG.debug("Exception encountered while connecting to " + + "the server : " + ex); + //try re-login + if (UserGroupInformation.isLoginKeytabBased()) { + UserGroupInformation.getLoginUser().reloginFromKeytab(); + } else { + UserGroupInformation.getLoginUser().reloginFromTicketCache(); + } + disposeSasl(); + //have granularity of milliseconds + //we are sleeping with the Connection lock held but since this + //connection instance is being used for connecting to the server + //in question, it is okay + Thread.sleep((rand.nextInt(reloginMaxBackoff) + 1)); + return null; + } else { + String msg = "Couldn't setup connection for " + + UserGroupInformation.getLoginUser().getUserName() + + " to " + serverPrincipal; + LOG.warn(msg); + throw (IOException) new IOException(msg).initCause(ex); + } + } else { + LOG.warn("Exception encountered while connecting to " + + "the server : " + ex); + } + if (ex instanceof RemoteException) + throw (RemoteException)ex; + throw new IOException(ex); + } + }); + } + + protected synchronized void setupIOstreams() + throws IOException, InterruptedException { + if (socket != null || shouldCloseConnection.get()) { + return; + } + + try { + if (LOG.isDebugEnabled()) { + LOG.debug("Connecting to "+server); + } + short numRetries = 0; + final short MAX_RETRIES = 5; + Random rand = null; + while (true) { + setupConnection(); + InputStream inStream = NetUtils.getInputStream(socket); + OutputStream outStream = NetUtils.getOutputStream(socket); + writeRpcHeader(outStream); + if (useSasl) { + final InputStream in2 = inStream; + final OutputStream out2 = outStream; + UserGroupInformation ticket = remoteId.getTicket(); + if (authMethod == AuthMethod.KERBEROS) {; + if (ticket != null && ticket.getRealUser() != null) { + ticket = ticket.getRealUser(); + } + } + boolean continueSasl = false; + try { + continueSasl = + ticket.doAs(new PrivilegedExceptionAction() { + @Override + public Boolean run() throws IOException { + return setupSaslConnection(in2, out2); + } + }); + } catch (Exception ex) { + if (rand == null) { + rand = new Random(); + } + handleSaslConnectionFailure(numRetries++, MAX_RETRIES, ex, rand, + ticket); + continue; + } + if (continueSasl) { + // Sasl connect is successful. Let's set up Sasl i/o streams. 
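Two notes on the failure handling and handshake logic above. First, handleSaslConnectionFailure() deliberately sleeps a random interval before retrying because, as its javadoc says, Kerberos treats concurrent identical authentications as a replay attack. Second, in setupIOstreams() the stray "{;" after the Kerberos check is a harmless empty statement, but the substitution it guards matters: the SASL handshake must run with the real, credentialed user rather than a proxy identity. Condensed:

    // Kerberos handshakes authenticate with the real user's credentials;
    // the proxy (effective) identity is asserted later via the ConnectionHeader.
    UserGroupInformation handshakeUser = remoteId.getTicket();
    if (authMethod == AuthMethod.KERBEROS && handshakeUser != null
        && handshakeUser.getRealUser() != null) {
      handshakeUser = handshakeUser.getRealUser();
    }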
+ inStream = saslRpcClient.getInputStream(inStream); + outStream = saslRpcClient.getOutputStream(outStream); + } else { + // fall back to simple auth because the server told us so. + authMethod = AuthMethod.SIMPLE; + useSasl = false; + } + } + this.in = new DataInputStream(new BufferedInputStream + (new PingInputStream(inStream))); + this.out = new DataOutputStream + (new BufferedOutputStream(outStream)); + writeHeader(); + + // update last activity time + touch(); + + // start the receiver thread after the socket connection has been set up + start(); + return; + } + } catch (IOException e) { + markClosed(e); + close(); + + throw e; + } + } + + /* Write the RPC header: magic, version and authentication method */ + private void writeRpcHeader(OutputStream outStream) throws IOException { + DataOutputStream out = new DataOutputStream(new BufferedOutputStream(outStream)); + // Write out the header, version and authentication method + out.write(HBaseServer.HEADER.array()); + out.write(HBaseServer.CURRENT_VERSION); + authMethod.write(out); + out.flush(); + } + + /** + * Write the protocol header for each connection. + * Out is not synchronized because only the first thread does this. + */ + private void writeHeader() throws IOException { + // Write out the ConnectionHeader + out.writeInt(header.getSerializedSize()); + header.writeTo(out); + } + + /** Close the connection. */ + protected synchronized void close() { + if (!shouldCloseConnection.get()) { + LOG.error("The connection is not in the closed state"); + return; + } + + // release the resources + // first thing to do: take the connection out of the connection list + synchronized (connections) { + if (connections.get(remoteId) == this) { + connections.remove(remoteId); + } + } + + // close the streams and therefore the socket + IOUtils.closeStream(out); + IOUtils.closeStream(in); + disposeSasl(); + + // clean up all calls + if (closeException == null) { + if (!calls.isEmpty()) { + LOG.warn( + "A connection is closed for no cause and calls are not empty"); + + // clean up calls anyway + closeException = new IOException("Unexpected closed connection"); + cleanupCalls(); + } + } else { + // log the info + if (LOG.isDebugEnabled()) { + LOG.debug("closing ipc connection to " + server + ": " + + closeException.getMessage(), closeException); + } + + // cleanup calls + cleanupCalls(); + } + if (LOG.isDebugEnabled()) + LOG.debug(getName() + ": closed"); + } + /* Initiates a call by sending the parameter to the remote server. * Note: this is not called from the Connection thread, but by other * threads.
@@ -575,15 +855,8 @@ LOG.debug(getName() + " got value #" + id); Call call = calls.remove(id); - boolean isError = response.getError(); - if (isError) { - if (call != null) { - //noinspection ThrowableInstanceNeverThrown - call.setException(new RemoteException( - response.getException().getExceptionName(), - response.getException().getStackTrace())); - } - } else { + Status status = response.getStatus(); + if (status == Status.SUCCESS) { ByteString responseObj = response.getResponse(); DataInputStream dis = new DataInputStream(responseObj.newInput()); @@ -594,6 +867,18 @@ if (call != null) { call.setValue(value); } + } else if (status == Status.ERROR) { + if (call != null) { + //noinspection ThrowableInstanceNeverThrown + call.setException(new RemoteException( + response.getException().getExceptionName(), + response.getException().getStackTrace())); + } + } else if (status == Status.FATAL) { + // Close the connection + markClosed(new RemoteException( + response.getException().getExceptionName(), + response.getException().getStackTrace())); } } catch (IOException e) { if (e instanceof SocketTimeoutException && remoteId.rpcTimeout > 0) { @@ -620,47 +905,6 @@ } } - /** Close the connection. */ - protected synchronized void close() { - if (!shouldCloseConnection.get()) { - LOG.error("The connection is not in the closed state"); - return; - } - - // release the resources - // first thing to do;take the connection out of the connection list - synchronized (connections) { - connections.remove(remoteId, this); - } - - // close the streams and therefore the socket - IOUtils.closeStream(out); - IOUtils.closeStream(in); - - // clean up all calls - if (closeException == null) { - if (!calls.isEmpty()) { - LOG.warn( - "A connection is closed for no cause and calls are not empty"); - - // clean up calls anyway - closeException = new IOException("Unexpected closed connection"); - cleanupCalls(); - } - } else { - // log the info - if (LOG.isDebugEnabled()) { - LOG.debug("closing ipc connection to " + remoteId.address + ": " + - closeException.getMessage(),closeException); - } - - // cleanup calls - cleanupCalls(); - } - if (LOG.isDebugEnabled()) - LOG.debug(getName() + ": closed"); - } - /* Cleanup all calls and mark them as done */ protected void cleanupCalls() { cleanupCalls(0); @@ -877,7 +1121,7 @@ } public Writable call(Writable param, InetSocketAddress addr, - User ticket, int rpcTimeout) + UserGroupInformation ticket, int rpcTimeout) throws IOException, InterruptedException { return call(param, addr, null, ticket, rpcTimeout); } @@ -889,7 +1133,7 @@ * threw an exception. */ public Writable call(Writable param, InetSocketAddress addr, Class protocol, - User ticket, int rpcTimeout) + UserGroupInformation ticket, int rpcTimeout) throws InterruptedException, IOException { Call call = new Call(param); Connection connection = getConnection(addr, protocol, ticket, rpcTimeout, call); @@ -977,7 +1221,7 @@ * contains nulls for calls that timed out or errored. */ public Writable[] call(Writable[] params, InetSocketAddress[] addresses, Class protocol, - User ticket) + UserGroupInformation ticket) throws IOException, InterruptedException { if (addresses.length == 0) return new Writable[0]; @@ -1012,7 +1256,7 @@ * pool. Connections to a given host/port are reused. 
*/ protected Connection getConnection(InetSocketAddress addr, Class protocol, - User ticket, + UserGroupInformation ticket, int rpcTimeout, Call call) throws IOException, InterruptedException { @@ -1050,14 +1294,14 @@ */ protected static class ConnectionId { final InetSocketAddress address; - final User ticket; + final UserGroupInformation ticket; final int rpcTimeout; Class protocol; private static final int PRIME = 16777619; ConnectionId(InetSocketAddress address, Class protocol, - User ticket, + UserGroupInformation ticket, int rpcTimeout) { this.protocol = protocol; this.address = address; @@ -1073,7 +1317,7 @@ return protocol; } - User getTicket() { + UserGroupInformation getTicket() { return ticket; } Index: src/main/java/org/apache/hadoop/hbase/ipc/Status.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/Status.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/Status.java (working copy) @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hbase.ipc; - -import org.apache.hadoop.classification.InterfaceAudience; - -/** - * Status of a Hadoop IPC call. 
- */ -@InterfaceAudience.Private -enum Status { - SUCCESS (0), - ERROR (1), - FATAL (-1); - - int state; - private Status(int state) { - this.state = state; - } -} Index: src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java (working copy) @@ -27,9 +27,9 @@ import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.RetriesExhaustedException; -import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.io.Writable; import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; import javax.net.SocketFactory; import java.io.IOException; @@ -280,7 +280,7 @@ long clientVersion, InetSocketAddress addr, Configuration conf, SocketFactory factory, int rpcTimeout) throws IOException { return getProxy(protocol, clientVersion, addr, - User.getCurrent(), conf, factory, rpcTimeout); + UserGroupInformation.getCurrentUser(), conf, factory, rpcTimeout); } /** @@ -299,7 +299,7 @@ */ public static VersionedProtocol getProxy( Class protocol, - long clientVersion, InetSocketAddress addr, User ticket, + long clientVersion, InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int rpcTimeout) throws IOException { VersionedProtocol proxy = @@ -363,7 +363,7 @@ public static Object[] call(Method method, Object[][] params, InetSocketAddress[] addrs, Class protocol, - User ticket, + UserGroupInformation ticket, Configuration conf) throws IOException, InterruptedException { return getProtocolEngine(protocol, conf) Index: src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java =================================================================== --- src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java (revision 1326827) +++ src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java (working copy) @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.security.User; +import org.apache.hadoop.security.UserGroupInformation; import java.net.InetAddress; @@ -50,7 +50,7 @@ * null if no credentials were provided. * @return A User */ - public static User getRequestUser() { + public static UserGroupInformation getRequestUser() { RequestContext ctx = instance.get(); if (ctx != null) { return ctx.getUser(); @@ -63,9 +63,9 @@ * request or null if no user is set. 
*/ public static String getRequestUserName() { - User user = getRequestUser(); + UserGroupInformation user = getRequestUser(); if (user != null) { - return user.getShortName(); + return user.getShortUserName(); } return null; } @@ -88,7 +88,7 @@ * @param remoteAddress * @param protocol */ - public static void set(User user, + public static void set(UserGroupInformation user, InetAddress remoteAddress, Class protocol) { RequestContext ctx = instance.get(); @@ -109,20 +109,20 @@ ctx.inRequest = false; } - private User user; + private UserGroupInformation user; private InetAddress remoteAddress; private Class protocol; // indicates we're within a RPC request invocation private boolean inRequest; - private RequestContext(User user, InetAddress remoteAddr, + private RequestContext(UserGroupInformation user, InetAddress remoteAddr, Class protocol) { this.user = user; this.remoteAddress = remoteAddr; this.protocol = protocol; } - public User getUser() { + public UserGroupInformation getUser() { return user; }
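A few self-contained sketches may help when reviewing the mechanics this patch introduces. First, the effective/real user split that getUserInfoPB() serializes into the connection header: UserGroupInformation's public proxy-user API exposes exactly that pair. A minimal sketch; "jobrunner" is a made-up user name, and only the UserGroupInformation calls themselves are real API:

    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyUserSketch {
      public static void main(String[] args) throws Exception {
        // The real user is whoever this process authenticated as.
        UserGroupInformation realUser = UserGroupInformation.getCurrentUser();
        // "jobrunner" is a hypothetical effective user being impersonated.
        UserGroupInformation effective =
            UserGroupInformation.createProxyUser("jobrunner", realUser);
        // Under SIMPLE auth the connection header carries both names;
        // under KERBEROS only the effective user is sent.
        System.out.println("effectiveUser = " + effective.getUserName());
        System.out.println("realUser      = " + effective.getRealUser().getUserName());
      }
    }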
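Next, the retry loop shared by setupIOstreams() and handleSaslConnectionFailure(): bounded retries with a random sleep, so that simultaneous connections from the same principal are not mistaken by the server's KDC-backed authentication for a replay attack. A minimal sketch of just that shape, assuming a hypothetical attemptConnect() in place of the real connection setup and SASL handshake:

    import java.io.IOException;
    import java.util.Random;

    public class SaslRetrySketch {
      private static final int MAX_RETRIES = 5;       // mirrors MAX_RETRIES above
      private static final int MAX_BACKOFF_MS = 5000; // stands in for reloginMaxBackoff

      static void connectWithRetries() throws IOException, InterruptedException {
        Random rand = new Random();
        for (int retries = 0; ; retries++) {
          try {
            attemptConnect();
            return;
          } catch (IOException ex) {
            if (retries >= MAX_RETRIES) {
              throw ex; // out of retries; surface the failure
            }
            // Random backoff before reconnecting, so concurrent clients
            // with the same principal do not collide again.
            Thread.sleep(rand.nextInt(MAX_BACKOFF_MS) + 1);
          }
        }
      }

      private static void attemptConnect() throws IOException {
        // hypothetical placeholder for setupConnection() + SASL negotiation
      }
    }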
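writeRpcHeader() sends a fixed preamble — magic bytes, a version byte, and a one-byte auth-method code — before any SASL traffic, which is how the server knows whether to expect a SASL negotiation at all. An approximation under assumed values: the MAGIC and VERSION constants below are illustrative stand-ins for HBaseServer.HEADER and HBaseServer.CURRENT_VERSION, and the auth-method code is whatever AuthMethod.write() would emit:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class RpcPreambleSketch {
      // Illustrative values only; the real ones live in HBaseServer/AuthMethod.
      private static final byte[] MAGIC = "hrpc".getBytes(StandardCharsets.UTF_8);
      private static final byte VERSION = 4;

      static byte[] buildPreamble(byte authMethodCode) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        out.write(MAGIC);          // connection magic
        out.write(VERSION);        // protocol version
        out.write(authMethodCode); // SIMPLE / KERBEROS / DIGEST selector
        out.flush();
        return buf.toByteArray();
      }
    }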
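Finally, receiveResponse() now switches on the three-valued Status carried in the response rather than the old boolean error flag: ERROR fails only the affected call, while FATAL also marks the whole connection closed. A schematic of that dispatch, with Call and markClosed() reduced to placeholders rather than the patch's real types:

    import java.io.IOException;

    public class StatusDispatchSketch {
      enum Status { SUCCESS, ERROR, FATAL } // mirrors the removed Status.java

      interface Call {
        void setValue(Object value);
        void setException(IOException e);
      }

      // Placeholder for Connection.markClosed(IOException): records the cause
      // so every remaining call on the connection is cleaned up.
      static void markClosed(IOException cause) { /* ... */ }

      static void dispatch(Status status, Call call, Object value, IOException error) {
        switch (status) {
          case SUCCESS:
            if (call != null) call.setValue(value);     // normal completion
            break;
          case ERROR:
            if (call != null) call.setException(error); // fail this call only
            break;
          case FATAL:
            markClosed(error);                          // fail the connection
            break;
        }
      }
    }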