commit 6b7b8e1081a2675c2afe01b17d92e0a622c095d3 Author: Alan Gates Date: Fri Feb 13 10:56:25 2015 -0800 HIVE-9677 WIP, added grantRole and revokeRole, need to test. HIVE-9677 First pass at all the code. I doubt the tests pass. Need to write many more tests. HIVE-9677 WIP added revokePrivileges which I forgot before. HIVE-9677 Tests pass for the moment. Need to write _way_ more tests. HIVE-9677 Got the multi-level role stuff working. HIVE-9677 Added initial test for getUserPrivileges, fails beautifully. HIVE-9677 Bug fixes, works with global privileges now, still need tests for db and table privileges. HIVE-9677 Finally have all the unit tests passing. diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java index e7e0178..b2c98b3 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.client.HConnection; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData; import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; @@ -35,18 +36,29 @@ import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData; import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; +import org.apache.hadoop.hive.metastore.api.HiveObjectRef; +import org.apache.hadoop.hive.metastore.api.HiveObjectType; +import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.LongColumnStatsData; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.StringColumnStatsData; import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.metastore.model.MRoleMap; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; @@ -74,6 +86,8 @@ private static HTableInterface partTable; private static HTableInterface dbTable; private static HTableInterface roleTable; + private static HTableInterface globalPrivsTable; + private static HTableInterface principalRoleMapTable; private static Map emptyParameters = new HashMap(); @Rule public ExpectedException thrown = ExpectedException.none(); @@ -96,6 +110,12 @@ public static void startMiniCluster() throws Exception { HBaseReadWrite.CATALOG_CF); roleTable = 
utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING), HBaseReadWrite.CATALOG_CF); + globalPrivsTable = + utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); + principalRoleMapTable = + utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); } @AfterClass @@ -111,6 +131,8 @@ public void setupConnection() throws IOException { Mockito.when(hconn.getTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable); Mockito.when(hconn.getTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable); Mockito.when(hconn.getTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable); + Mockito.when(hconn.getTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable); + Mockito.when(hconn.getTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable); conf = new HiveConf(); // Turn off caching, as we want to test actual interaction with HBase conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true); @@ -432,58 +454,6 @@ public void alterPartitions() throws Exception { } } - // TODO - Fix this and the next test. They depend on test execution order and are bogus. - @Test - public void createManyPartitions() throws Exception { - String dbName = "default"; - String tableName = "manyParts"; - int startTime = (int)(System.currentTimeMillis() / 1000); - List cols = new ArrayList(); - cols.add(new FieldSchema("col1", "int", "nocomment")); - SerDeInfo serde = new SerDeInfo("serde", "seriallib", null); - StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, - serde, null, null, emptyParameters); - List partCols = new ArrayList(); - partCols.add(new FieldSchema("pc", "string", "")); - Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols, - emptyParameters, null, null, null); - store.createTable(table); - - List partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan"); - for (String val : partVals) { - List vals = new ArrayList(); - vals.add(val); - StorageDescriptor psd = new StorageDescriptor(sd); - psd.setLocation("file:/tmp/pc=" + val); - Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd, - emptyParameters); - store.addPartition(part); - - Partition p = store.getPartition(dbName, tableName, vals); - Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation()); - } - - Assert.assertEquals(2, HBaseReadWrite.getInstance(conf).countStorageDescriptor()); - - } - - @Test - public void createDifferentPartition() throws Exception { - int startTime = (int)(System.currentTimeMillis() / 1000); - Map emptyParameters = new HashMap(); - List cols = new ArrayList(); - cols.add(new FieldSchema("col1", "int", "nocomment")); - SerDeInfo serde = new SerDeInfo("serde", "seriallib", null); - StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input2", "output", false, 0, - serde, null, null, emptyParameters); - Table table = new Table("differenttable", "default", "me", startTime, startTime, 0, sd, null, - emptyParameters, null, null, null); - store.createTable(table); - - Assert.assertEquals(3, HBaseReadWrite.getInstance(conf).countStorageDescriptor()); - - } - @Test public void getPartitions() throws Exception { String dbName = "default"; @@ -685,6 +655,437 @@ public void dropRole() throws Exception { } @Test + public void grantRevokeRoles() throws Exception { + int now = (int)(System.currentTimeMillis()/1000); + String roleName1 = "role1"; + 
store.addRole(roleName1, "me"); + String roleName2 = "role2"; + store.addRole(roleName2, "me"); + + Role role1 = store.getRole(roleName1); + Role role2 = store.getRole(roleName2); + + store.grantRole(role1, "fred", PrincipalType.USER, "bob", PrincipalType.USER, false); + store.grantRole(role2, roleName1, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true); + store.grantRole(role2, "fred", PrincipalType.USER, "admin", PrincipalType.ROLE, false); + + List maps = store.listRoles("fred", PrincipalType.USER); + Assert.assertEquals(3, maps.size()); + boolean sawRole1 = false, sawRole2 = false, sawPublic = false; + for (MRoleMap map : maps) { + if (map.getRole().getRoleName().equals(roleName1)) { + sawRole1 = true; + Assert.assertEquals("fred", map.getPrincipalName()); + Assert.assertEquals(PrincipalType.USER.toString(), map.getPrincipalType()); + Assert.assertTrue(map.getAddTime() >= now); + Assert.assertEquals("bob", map.getGrantor()); + Assert.assertEquals(PrincipalType.USER.toString(), map.getGrantorType()); + Assert.assertFalse(map.getGrantOption()); + } else if (map.getRole().getRoleName().equals(roleName2)) { + sawRole2 = true; + Assert.assertEquals("fred", map.getPrincipalName()); + Assert.assertEquals(PrincipalType.USER.toString(), map.getPrincipalType()); + LOG.debug("now " + now + " add time " + map.getAddTime()); + Assert.assertTrue(map.getAddTime() >= now); + Assert.assertEquals("admin", map.getGrantor()); + Assert.assertEquals(PrincipalType.ROLE.toString(), map.getGrantorType()); + Assert.assertFalse(map.getGrantOption()); + } else if (map.getRole().getRoleName().equals(HiveMetaStore.PUBLIC)) { + sawPublic = true; + Assert.assertEquals("fred", map.getPrincipalName()); + Assert.assertEquals(PrincipalType.USER.toString(), map.getPrincipalType()); + Assert.assertFalse(map.getGrantOption()); + } else { + Assert.fail("Unknown role name " + map.getRole().getRoleName()); + } + } + Assert.assertTrue(sawRole1 && sawRole2 && sawPublic); + + maps = store.listRoles("fred", PrincipalType.ROLE); + Assert.assertEquals(0, maps.size()); + + maps = store.listRoles(roleName1, PrincipalType.ROLE); + Assert.assertEquals(1, maps.size()); + MRoleMap map = maps.get(0); + Assert.assertEquals(roleName1, map.getPrincipalName()); + Assert.assertEquals(PrincipalType.ROLE.toString(), map.getPrincipalType()); + Assert.assertEquals(roleName2, map.getRole().getRoleName()); + Assert.assertTrue(map.getAddTime() <= now); + Assert.assertEquals("admin", map.getGrantor()); + Assert.assertEquals(PrincipalType.ROLE.toString(), map.getGrantorType()); + Assert.assertTrue(map.getGrantOption()); + + // Test listing all members in a role + maps = store.listRoleMembers(roleName1); + Assert.assertEquals(1, maps.size()); + Assert.assertEquals("fred", maps.get(0).getPrincipalName()); + Assert.assertEquals(PrincipalType.USER.toString(), maps.get(0).getPrincipalType()); + Assert.assertTrue(maps.get(0).getAddTime() >= now); + Assert.assertEquals("bob", maps.get(0).getGrantor()); + Assert.assertEquals(PrincipalType.USER.toString(), maps.get(0).getGrantorType()); + Assert.assertFalse(maps.get(0).getGrantOption()); + + maps = store.listRoleMembers(roleName2); + Assert.assertEquals(2, maps.size()); + boolean sawFred = false; + sawRole1 = false; + for (MRoleMap m : maps) { + if ("fred".equals(m.getPrincipalName())) sawFred = true; + else if (roleName1.equals(m.getPrincipalName())) sawRole1 = true; + else Assert.fail("Unexpected principal " + m.getPrincipalName()); + } + Assert.assertTrue(sawFred && sawRole1); + + // Revoke a role with 
grant option, make sure it just goes to no grant option + store.revokeRole(role2, roleName1, PrincipalType.ROLE, true); + maps = store.listRoles(roleName1, PrincipalType.ROLE); + Assert.assertEquals(1, maps.size()); + Assert.assertEquals(roleName2, maps.get(0).getRole().getRoleName()); + Assert.assertFalse(maps.get(0).getGrantOption()); + + // Drop a role, make sure it is properly removed from the map + store.removeRole(roleName1); + maps = store.listRoles("fred", PrincipalType.USER); + Assert.assertEquals(2, maps.size()); + sawRole2 = sawPublic = false; + for (MRoleMap m : maps) { + if (m.getRole().getRoleName().equals(roleName2)) sawRole2 = true; + else if (m.getRole().getRoleName().equals(HiveMetaStore.PUBLIC)) sawPublic = true; + else Assert.fail("Unknown role " + m.getRole().getRoleName()); + } + Assert.assertTrue(sawRole2 && sawPublic); + maps = store.listRoles(roleName1, PrincipalType.ROLE); + Assert.assertEquals(0, maps.size()); + + // Revoke a role without grant option, make sure it goes away + store.revokeRole(role2, "fred", PrincipalType.USER, false); + maps = store.listRoles("fred", PrincipalType.USER); + Assert.assertEquals(1, maps.size()); + Assert.assertEquals(HiveMetaStore.PUBLIC, maps.get(0).getRole().getRoleName()); + } + + @Test + public void userToRoleMap() throws Exception { + String roleName1 = "utrm1"; + store.addRole(roleName1, "me"); + String roleName2 = "utrm2"; + store.addRole(roleName2, "me"); + String user1 = "wilma"; + String user2 = "betty"; + + Role role1 = store.getRole(roleName1); + Role role2 = store.getRole(roleName2); + + store.grantRole(role1, user1, PrincipalType.USER, "bob", PrincipalType.USER, false); + store.grantRole(role1, roleName2, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true); + + List<String> roles = HBaseReadWrite.getInstance(conf).getUserRoles(user1); + Assert.assertEquals(2, roles.size()); + String[] roleNames = roles.toArray(new String[roles.size()]); + Arrays.sort(roleNames); + Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames); + + store.grantRole(role2, user1, PrincipalType.USER, "admin", PrincipalType.ROLE, false); + store.grantRole(role1, user2, PrincipalType.USER, "bob", PrincipalType.USER, false); + + roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2); + Assert.assertEquals(2, roles.size()); + roleNames = roles.toArray(new String[roles.size()]); + Arrays.sort(roleNames); + Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames); + + store.revokeRole(role1, roleName2, PrincipalType.ROLE, false); + + // user1 should still have both roles since she was granted role2 directly as well. user2 + // should now have only role1 since role2 was revoked from role1.
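+ // (wilma held role2 both directly and via role1, so only her indirect path is removed; + // betty held role2 only via role1, so the revoke drops it for her.)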
+ roles = HBaseReadWrite.getInstance(conf).getUserRoles(user1); + Assert.assertEquals(2, roles.size()); + roleNames = roles.toArray(new String[roles.size()]); + Arrays.sort(roleNames); + Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames); + + roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2); + Assert.assertEquals(1, roles.size()); + Assert.assertEquals(roleName1, roles.get(0)); + } + + @Test + public void userToRoleMapOnDrop() throws Exception { + String roleName1 = "utrmod1"; + store.addRole(roleName1, "me"); + String roleName2 = "utrmod2"; + store.addRole(roleName2, "me"); + String user1 = "pebbles"; + String user2 = "bam-bam"; + + Role role1 = store.getRole(roleName1); + Role role2 = store.getRole(roleName2); + + store.grantRole(role1, user1, PrincipalType.USER, "bob", PrincipalType.USER, false); + store.grantRole(role1, roleName2, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true); + store.grantRole(role1, user2, PrincipalType.USER, "bob", PrincipalType.USER, false); + + List roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2); + Assert.assertEquals(2, roles.size()); + String[] roleNames = roles.toArray(new String[roles.size()]); + Arrays.sort(roleNames); + Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames); + + store.removeRole(roleName2); + + roles = HBaseReadWrite.getInstance(conf).getUserRoles(user1); + Assert.assertEquals(1, roles.size()); + Assert.assertEquals(roleName1, roles.get(0)); + + roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2); + Assert.assertEquals(1, roles.size()); + Assert.assertEquals(roleName1, roles.get(0)); + } + + @Test + public void grantRevokeGlobalPrivileges() throws Exception { + doGrantRevoke(HiveObjectType.GLOBAL, null, null, new String[] {"grpg1", "grpg2"}, + new String[] {"bugs", "elmer", "daphy", "wiley"}); + } + + @Test + public void grantRevokeDbPrivileges() throws Exception { + String dbName = "grdbp_db"; + try { + Database db = new Database(dbName, "no description", "file:///tmp", emptyParameters); + store.createDatabase(db); + doGrantRevoke(HiveObjectType.DATABASE, dbName, null, + new String[] {"grdbp_role1", "grdbp_role2"}, + new String[] {"fred", "barney", "wilma", "betty"}); + } finally { + store.dropDatabase(dbName); + } + } + + @Test + public void grantRevokeTablePrivileges() throws Exception { + String dbName = "grtp_db"; + String tableName = "grtp_table"; + try { + Database db = new Database(dbName, "no description", "file:///tmp", emptyParameters); + store.createDatabase(db); + int startTime = (int)(System.currentTimeMillis() / 1000); + List cols = new ArrayList(); + cols.add(new FieldSchema("col1", "int", "nocomment")); + SerDeInfo serde = new SerDeInfo("serde", "seriallib", null); + StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, + serde, null, null, emptyParameters); + Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, null, + emptyParameters, null, null, null); + store.createTable(table); + doGrantRevoke(HiveObjectType.TABLE, dbName, tableName, + new String[] {"grtp_role1", "grtp_role2"}, + new String[] {"batman", "robin", "superman", "wonderwoman"}); + + } finally { + if (store.getTable(dbName, tableName) != null) store.dropTable(dbName, tableName); + store.dropDatabase(dbName); + } + } + + private void doGrantRevoke(HiveObjectType objectType, String dbName, String tableName, + String[] roleNames, String[] userNames) + throws Exception { + store.addRole(roleNames[0], "me"); + 
store.addRole(roleNames[1], "me"); + int now = (int)(System.currentTimeMillis() / 1000); + + Role role1 = store.getRole(roleNames[0]); + Role role2 = store.getRole(roleNames[1]); + store.grantRole(role1, userNames[0], PrincipalType.USER, "bob", PrincipalType.USER, false); + store.grantRole(role1, roleNames[1], PrincipalType.ROLE, "admin", PrincipalType.ROLE, true); + store.grantRole(role2, userNames[1], PrincipalType.USER, "bob", PrincipalType.USER, false); + + List privileges = new ArrayList(); + HiveObjectRef hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + PrivilegeGrantInfo grantInfo = + new PrivilegeGrantInfo("read", now, "me", PrincipalType.USER, false); + HiveObjectPrivilege hop = new HiveObjectPrivilege(hiveObjRef, userNames[0], PrincipalType.USER, + grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("exec", now, "me", PrincipalType.USER, false); + hop = new HiveObjectPrivilege(hiveObjRef, roleNames[1], PrincipalType.ROLE, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("create", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("create2", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo); + privileges.add(hop); + + PrivilegeBag pBag = new PrivilegeBag(privileges); + store.grantPrivileges(pBag); + + PrincipalPrivilegeSet pps = getPPS(objectType, dbName, tableName, userNames[0]); + + Assert.assertEquals(1, pps.getUserPrivilegesSize()); + Assert.assertEquals(1, pps.getUserPrivileges().get(userNames[0]).size()); + grantInfo = pps.getUserPrivileges().get(userNames[0]).get(0); + Assert.assertEquals("read", grantInfo.getPrivilege()); + Assert.assertTrue(now <= grantInfo.getCreateTime()); + Assert.assertEquals("me", grantInfo.getGrantor()); + Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType()); + Assert.assertFalse(grantInfo.isGrantOption()); + + Assert.assertEquals(2, pps.getRolePrivilegesSize()); + Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size()); + grantInfo = pps.getRolePrivileges().get(roleNames[0]).get(0); + Assert.assertEquals("write", grantInfo.getPrivilege()); + Assert.assertTrue(now <= grantInfo.getCreateTime()); + Assert.assertEquals("me", grantInfo.getGrantor()); + Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType()); + Assert.assertTrue(grantInfo.isGrantOption()); + + Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[1]).size()); + grantInfo = pps.getRolePrivileges().get(roleNames[1]).get(0); + Assert.assertEquals("exec", grantInfo.getPrivilege()); + Assert.assertTrue(now <= grantInfo.getCreateTime()); + Assert.assertEquals("me", grantInfo.getGrantor()); + Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType()); + Assert.assertFalse(grantInfo.isGrantOption()); + + pps = getPPS(objectType, dbName, tableName, 
userNames[1]); + + Assert.assertEquals(0, pps.getUserPrivilegesSize()); + + Assert.assertEquals(1, pps.getRolePrivilegesSize()); + Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[1]).size()); + grantInfo = pps.getRolePrivileges().get(roleNames[1]).get(0); + Assert.assertEquals("exec", grantInfo.getPrivilege()); + Assert.assertTrue(now <= grantInfo.getCreateTime()); + Assert.assertEquals("me", grantInfo.getGrantor()); + Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType()); + Assert.assertFalse(grantInfo.isGrantOption()); + + pps = getPPS(objectType, dbName, tableName, userNames[2]); + + Assert.assertEquals(1, pps.getUserPrivilegesSize()); + Assert.assertEquals(2, pps.getUserPrivileges().get(userNames[2]).size()); + Assert.assertEquals(0, pps.getRolePrivilegesSize()); + + pps = getPPS(objectType, dbName, tableName, userNames[3]); + Assert.assertEquals(0, pps.getUserPrivilegesSize()); + Assert.assertEquals(0, pps.getRolePrivilegesSize()); + + // Test that removing role removes the role grants + store.removeRole(roleNames[1]); + checkRoleRemovedFromAllPrivileges(objectType, dbName, tableName, roleNames[1]); + pps = getPPS(objectType, dbName, tableName, userNames[0]); + + Assert.assertEquals(1, pps.getRolePrivilegesSize()); + Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size()); + + pps = getPPS(objectType, dbName, tableName, userNames[1]); + + Assert.assertEquals(0, pps.getRolePrivilegesSize()); + + // Test that revoking with grant option = true just removes grant option + privileges.clear(); + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("create2", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo); + privileges.add(hop); + + pBag = new PrivilegeBag(privileges); + store.revokePrivileges(pBag, true); + pps = getPPS(objectType, dbName, tableName, userNames[0]); + + Assert.assertEquals(1, pps.getRolePrivilegesSize()); + Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size()); + grantInfo = pps.getRolePrivileges().get(roleNames[0]).get(0); + Assert.assertEquals("write", grantInfo.getPrivilege()); + Assert.assertTrue(now <= grantInfo.getCreateTime()); + Assert.assertEquals("me", grantInfo.getGrantor()); + Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType()); + Assert.assertFalse(grantInfo.isGrantOption()); + + pps = getPPS(objectType, dbName, tableName, userNames[2]); + + Assert.assertEquals(1, pps.getUserPrivilegesSize()); + Assert.assertEquals(2, pps.getUserPrivileges().get(userNames[2]).size()); + for (PrivilegeGrantInfo pgi : pps.getUserPrivileges().get(userNames[2])) { + if (pgi.getPrivilege().equals("create")) Assert.assertTrue(pgi.isGrantOption()); + else if (pgi.getPrivilege().equals("create2")) Assert.assertFalse(pgi.isGrantOption()); + else Assert.fail("huh?"); + } + + // Test revoking revokes + store.revokePrivileges(pBag, false); + + pps = getPPS(objectType, dbName, tableName, userNames[0]); + + Assert.assertEquals(1, pps.getUserPrivilegesSize()); + Assert.assertEquals(1, pps.getRolePrivilegesSize()); + Assert.assertEquals(0, pps.getRolePrivileges().get(roleNames[0]).size()); + + pps = 
getPPS(objectType, dbName, tableName, userNames[2]); + Assert.assertEquals(1, pps.getUserPrivilegesSize()); + Assert.assertEquals(1, pps.getUserPrivileges().get(userNames[2]).size()); + Assert.assertEquals("create", pps.getUserPrivileges().get(userNames[2]).get(0).getPrivilege()); + Assert.assertEquals(0, pps.getRolePrivilegesSize()); + } + + private PrincipalPrivilegeSet getPPS(HiveObjectType objectType, String dbName, String tableName, + String userName) + throws InvalidObjectException, MetaException { + switch (objectType) { + case GLOBAL: return store.getUserPrivilegeSet(userName, null); + case DATABASE: return store.getDBPrivilegeSet(dbName, userName, null); + case TABLE: return store.getTablePrivilegeSet(dbName, tableName, userName, null); + default: throw new RuntimeException("huh?"); + } + } + + private void checkRoleRemovedFromAllPrivileges(HiveObjectType objectType, String dbName, + String tableName, String roleName) + throws IOException, NoSuchObjectException, MetaException { + List pgi = null; + switch (objectType) { + case GLOBAL: + pgi = HBaseReadWrite.getInstance(conf).getGlobalPrivs().getRolePrivileges().get(roleName); + break; + + case DATABASE: + pgi = store.getDatabase(dbName).getPrivileges().getRolePrivileges().get(roleName); + break; + + case TABLE: + pgi = store.getTable(dbName, tableName).getPrivileges().getRolePrivileges().get(roleName); + break; + + default: + Assert.fail(); + } + + Assert.assertNull("Expected null for role " + roleName + " for type " + objectType.toString() + + " with db " + dbName + " and table " + tableName, pgi); + + + + } + + @Test public void tableStatistics() throws Exception { long now = System.currentTimeMillis(); String dbname = "default"; diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java new file mode 100644 index 0000000..b9b54a2 --- /dev/null +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java @@ -0,0 +1,159 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
*/ +package org.apache.hadoop.hive.metastore.hbase; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.client.HConnection; +import org.apache.hadoop.hbase.client.HTableInterface; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Integration tests with HBase mini-cluster for storage descriptor sharing in HBaseStore. + */ +public class TestStorageDescriptorSharing { + + private static final Log LOG = LogFactory.getLog(TestStorageDescriptorSharing.class.getName()); + + private static HBaseTestingUtility utility; + private static HTableInterface tblTable; + private static HTableInterface sdTable; + private static HTableInterface partTable; + private static HTableInterface dbTable; + private static HTableInterface roleTable; + private static HTableInterface globalPrivsTable; + private static HTableInterface principalRoleMapTable; + private static Map<String, String> emptyParameters = new HashMap<String, String>(); + + @Rule public ExpectedException thrown = ExpectedException.none(); + @Mock private HConnection hconn; + private HBaseStore store; + private HiveConf conf; + + @BeforeClass + public static void startMiniCluster() throws Exception { + utility = new HBaseTestingUtility(); + utility.startMiniCluster(); + byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF}; + tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING), + families); + sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); + partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING), + families); + dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); + roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); + globalPrivsTable = + utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); + principalRoleMapTable = + utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING), + HBaseReadWrite.CATALOG_CF); + } + + @AfterClass + public static void shutdownMiniCluster() throws Exception { + utility.shutdownMiniCluster(); + } + + @Before + public void setupConnection() throws IOException { + MockitoAnnotations.initMocks(this); + Mockito.when(hconn.getTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable); + Mockito.when(hconn.getTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable); + Mockito.when(hconn.getTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable); + Mockito.when(hconn.getTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable); +
Mockito.when(hconn.getTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable); + Mockito.when(hconn.getTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable); + Mockito.when(hconn.getTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable); + conf = new HiveConf(); + // Turn off caching, as we want to test actual interaction with HBase + conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true); + HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf); + hbase.setConnection(hconn); + store = new HBaseStore(); + store.setConf(conf); + } + + @Test + public void createManyPartitions() throws Exception { + String dbName = "default"; + String tableName = "manyParts"; + int startTime = (int)(System.currentTimeMillis() / 1000); + List cols = new ArrayList(); + cols.add(new FieldSchema("col1", "int", "nocomment")); + SerDeInfo serde = new SerDeInfo("serde", "seriallib", null); + StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, + serde, null, null, emptyParameters); + List partCols = new ArrayList(); + partCols.add(new FieldSchema("pc", "string", "")); + Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols, + emptyParameters, null, null, null); + store.createTable(table); + + List partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan"); + for (String val : partVals) { + List vals = new ArrayList(); + vals.add(val); + StorageDescriptor psd = new StorageDescriptor(sd); + psd.setLocation("file:/tmp/pc=" + val); + Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd, + emptyParameters); + store.addPartition(part); + + Partition p = store.getPartition(dbName, tableName, vals); + Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation()); + } + + Assert.assertEquals(1, HBaseReadWrite.getInstance(conf).countStorageDescriptor()); + + sd = new StorageDescriptor(cols, "file:/tmp", "input2", "output", false, 0, + serde, null, null, emptyParameters); + table = new Table("differenttable", "default", "me", startTime, startTime, 0, sd, null, + emptyParameters, null, null, null); + store.createTable(table); + + Assert.assertEquals(2, HBaseReadWrite.getInstance(conf).countStorageDescriptor()); + + } +} diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java index 4a75acf..67268e0 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java @@ -47,7 +47,7 @@ public void write(DataOutput out) throws IOException { HBaseUtils.writeStrStrMap(out, db.getParameters()); HBaseUtils.writePrivileges(out, db.getPrivileges()); HBaseUtils.writeStr(out, db.getOwnerName()); - HBaseUtils.writePrincipalType(out, db.getOwnerType()); + HBaseUtils.writeEnum(out, db.getOwnerType()); } @Override diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java new file mode 100644 index 0000000..cde1b78 --- /dev/null +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.hadoop.hive.metastore.hbase; + +import org.apache.hadoop.io.Writable; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A class to serialize a list of grant infos. There is not a corresponding thrift object. + */ +public class GrantInfoList implements Writable { + List<GrantInfoWritable> grantInfos; + + GrantInfoList() { + grantInfos = new ArrayList<GrantInfoWritable>(); + } + + GrantInfoList(List<GrantInfoWritable> infos) { + grantInfos = infos; + } + + @Override + public void write(DataOutput out) throws IOException { + if (grantInfos == null) { + out.writeInt(0); + } else { + out.writeInt(grantInfos.size()); + for (GrantInfoWritable info : grantInfos) { + info.write(out); + } + } + } + + @Override + public void readFields(DataInput in) throws IOException { + int size = in.readInt(); + if (size == 0) { + grantInfos = new ArrayList<GrantInfoWritable>(); + } else { + grantInfos = new ArrayList<GrantInfoWritable>(size); + for (int i = 0; i < size; i++) { + GrantInfoWritable info = new GrantInfoWritable(); + info.readFields(in); + grantInfos.add(info); + } + } + } +} diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java new file mode 100644 index 0000000..2880410 --- /dev/null +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java @@ -0,0 +1,82 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.hadoop.hive.metastore.hbase; + +import org.apache.hadoop.hive.metastore.api.PrincipalType; +import org.apache.hadoop.io.Writable; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A class to serialize grant information. There is not a corresponding thrift object.
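+ * The serialized layout matches write() and readFields() below: principal name, principal + * type ordinal, add time, grantor name, grantor type ordinal, and the grant option flag.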
+ */ +class GrantInfoWritable implements Writable { + String principalName; + PrincipalType principalType; + int addTime; + String grantor; + PrincipalType grantorType; + boolean grantOption; + + GrantInfoWritable() { + } + + /** + * + * @param name name of the user or role + * @param type whether this is a user or a role + * @param addTime time user was added to role + * @param grantor user or role who granted this principal into the role + * @param grantorType whether the grantor was a user or a role + * @param withGrantOption whether this principal has the grant option + */ + GrantInfoWritable(String name, PrincipalType type, int addTime, String grantor, + PrincipalType grantorType, boolean withGrantOption) { + principalName = name; + principalType = type; + this.addTime = addTime; + this.grantor = grantor; + this.grantorType = grantorType; + grantOption = withGrantOption; + } + + @Override + public void write(DataOutput out) throws IOException { + HBaseUtils.writeStr(out, principalName); + out.writeInt(principalType.getValue()); + out.writeInt(addTime); + HBaseUtils.writeStr(out, grantor); + out.writeInt(grantorType.getValue()); + out.writeBoolean(grantOption); + } + + @Override + public void readFields(DataInput in) throws IOException { + principalName = HBaseUtils.readStr(in); + principalType = PrincipalType.findByValue(in.readInt()); + addTime = in.readInt(); + grantor = HBaseUtils.readStr(in); + grantorType = PrincipalType.findByValue(in.readInt()); + grantOption = in.readBoolean(); + } +} diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java index e0ccbd5..acd090b 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hive.metastore.hbase; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -49,10 +48,15 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.io.Writable; +import java.io.DataInput; +import java.io.DataOutput; import java.io.IOException; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; @@ -62,9 +66,11 @@ import java.util.Collection; import java.util.Deque; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; /** @@ -73,21 +79,23 @@ class HBaseReadWrite { @VisibleForTesting final static String DB_TABLE = "DBS"; + @VisibleForTesting final static String GLOBAL_PRIVS_TABLE = "GLOBAL_PRIVS"; @VisibleForTesting final static String PART_TABLE = "PARTITIONS"; @VisibleForTesting final static String ROLE_TABLE = "ROLES"; @VisibleForTesting final static String SD_TABLE = "SDS"; @VisibleForTesting final static String TABLE_TABLE = "TBLS"; + @VisibleForTesting final static 
String USER_TO_ROLE_TABLE = "USER_TO_ROLE"; @VisibleForTesting final static byte[] CATALOG_CF = "c".getBytes(HBaseUtils.ENCODING); @VisibleForTesting final static byte[] STATS_CF = "s".getBytes(HBaseUtils.ENCODING); @VisibleForTesting final static String NO_CACHE_CONF = "no.use.cache"; private final static byte[] CATALOG_COL = "cat".getBytes(HBaseUtils.ENCODING); + private final static byte[] ROLES_COL = "roles".getBytes(HBaseUtils.ENCODING); private final static byte[] REF_COUNT_COL = "ref".getBytes(HBaseUtils.ENCODING); + private final static byte[] GLOBAL_PRIVS_KEY = "globalprivs".getBytes(HBaseUtils.ENCODING); private final static int TABLES_TO_CACHE = 10; - // TODO Add privileges as a second column in the CATALOG_CF - - private final static String[] tableNames = { DB_TABLE, PART_TABLE, ROLE_TABLE, SD_TABLE, - TABLE_TABLE }; + private final static String[] tableNames = { DB_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE, + USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE, TABLE_TABLE }; static final private Log LOG = LogFactory.getLog(HBaseReadWrite.class.getName()); private static ThreadLocal<HBaseReadWrite> self = new ThreadLocal<HBaseReadWrite>() { @@ -121,6 +129,11 @@ protected HBaseReadWrite initialValue() { private Counter sdMisses; private Counter sdOverflows; private List<Counter> counters; + // roleCache doesn't use ObjectCache because I don't want to limit the size. I am assuming + // that the number of roles will always be small (< 100) so caching the whole thing should not + // be painful. + private Map<String, GrantInfoList> roleCache; + boolean entireRoleTableInCache; /** * Get the instance of HBaseReadWrite for the current thread. This is intended to be used by @@ -199,6 +212,9 @@ private HBaseReadWrite(Configuration configuration) { partCache = new PartitionCache(totalObjectsToCache / 2, partHits, partMisses, partOverflows); statsCache = StatsCache.getInstance(conf); } + + roleCache = new HashMap<String, GrantInfoList>(); + entireRoleTableInCache = false; } // Synchronize this so not everyone's doing it at once.
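The roleCache comment above describes the design: rather than a bounded ObjectCache, the whole (assumed small) role table is loaded once and served from memory until flushed. A minimal standalone sketch of that pattern — the WholeTableCache name and Loader interface are illustrative, not part of this patch:

import java.util.HashMap;
import java.util.Map;

// Sketch of the load-once, whole-table cache pattern used for roleCache:
// scan everything on first access, serve from memory, drop on flush.
class WholeTableCache<V> {
  interface Loader<V> { Map<String, V> scanAll(); }

  private final Map<String, V> cache = new HashMap<String, V>();
  private boolean entireTableInCache = false;  // mirrors entireRoleTableInCache

  V get(String key, Loader<V> loader) {
    if (!entireTableInCache) {   // first access pays for one full scan of the backing table
      cache.putAll(loader.scanAll());
      entireTableInCache = true;
    }
    return cache.get(key);
  }

  void flush() {                 // mirrors flushRoleCache(): invalidated between queries
    cache.clear();
    entireTableInCache = false;
  }
}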
@@ -222,6 +238,10 @@ static synchronized void createTablesIfNotExist() throws IOException { } } + /********************************************************************************************** + * Transaction related methods + *********************************************************************************************/ + /** * Begin a transaction */ @@ -245,6 +265,10 @@ void close() throws IOException { conn.close(); } + /********************************************************************************************** + * Database related methods + *********************************************************************************************/ + /** * Fetch a database object * @param name name of the database to fetch @@ -307,6 +331,37 @@ void deleteDb(String name) throws IOException { flush(); } + /********************************************************************************************** + * Global privilege related methods + *********************************************************************************************/ + + /** + * Fetch the global privileges object + * @return + * @throws IOException + */ + PrincipalPrivilegeSet getGlobalPrivs() throws IOException { + byte[] key = GLOBAL_PRIVS_KEY; + byte[] serialized = read(GLOBAL_PRIVS_TABLE, key, CATALOG_CF, CATALOG_COL); + if (serialized == null) return null; + return HBaseUtils.readPrivileges(serialized); + } + + /** + * Store the global privileges object + * @throws IOException + */ + void putGlobalPrivs(PrincipalPrivilegeSet privs) throws IOException { + byte[] key = GLOBAL_PRIVS_KEY; + byte[] serialized = HBaseUtils.writePrivileges(privs); + store(GLOBAL_PRIVS_TABLE, key, CATALOG_CF, CATALOG_COL, serialized); + flush(); + } + + /********************************************************************************************** + * Partition related methods + *********************************************************************************************/ + /** * Fetch one partition * @param dbName database table is in @@ -483,6 +538,335 @@ void deletePartition(String dbName, String tableName, List partVals) thr flush(); } + private Partition getPartition(String dbName, String tableName, List partVals, + boolean populateCache) throws IOException { + Partition cached = partCache.get(dbName, tableName, partVals); + if (cached != null) return cached; + byte[] key = buildPartitionKey(dbName, tableName, partVals); + byte[] serialized = read(PART_TABLE, key, CATALOG_CF, CATALOG_COL); + if (serialized == null) return null; + PartitionWritable part = new PartitionWritable(); + HBaseUtils.deserialize(part, serialized); + if (populateCache) partCache.put(dbName, tableName, part.part); + return part.part; + } + + + private List scanPartitions(byte[] keyPrefix, byte[] colFam, byte[] colName, + int maxResults) throws IOException { + return scanPartitionsWithFilter(keyPrefix, colFam, colName, maxResults, null); + } + + private List scanPartitionsWithFilter(byte[] keyPrefix, byte[] colFam, byte[] colName, + int maxResults, Filter filter) + throws IOException { + Iterator iter = + scanWithFilter(PART_TABLE, keyPrefix, colFam, colName, filter); + List parts = new ArrayList(); + int numToFetch = maxResults < 0 ? 
Integer.MAX_VALUE : maxResults; + for (int i = 0; i < numToFetch && iter.hasNext(); i++) { + PartitionWritable p = new PartitionWritable(); + HBaseUtils.deserialize(p, iter.next().getValue(colFam, colName)); + parts.add(p.part); + } + return parts; + } + + private byte[] buildPartitionKey(String dbName, String tableName, List<String> partVals) { + Deque<String> keyParts = new ArrayDeque<String>(partVals); + keyParts.addFirst(tableName); + keyParts.addFirst(dbName); + return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()])); + } + + private byte[] buildPartitionKey(PartitionWritable part) throws IOException { + Deque<String> keyParts = new ArrayDeque<String>(part.part.getValues()); + keyParts.addFirst(part.part.getTableName()); + keyParts.addFirst(part.part.getDbName()); + return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()])); + } + + /********************************************************************************************** + * Role related methods + *********************************************************************************************/ + + /** + * Fetch the list of all roles for a user + * @param userName name of the user + * @return the list of all roles this user participates in + * @throws IOException + */ + List<String> getUserRoles(String userName) throws IOException { + byte[] key = HBaseUtils.buildKey(userName); + byte[] serialized = read(USER_TO_ROLE_TABLE, key, CATALOG_CF, CATALOG_COL); + if (serialized == null) return null; + RoleList roles = new RoleList(); + HBaseUtils.deserialize(roles, serialized); + return roles.roles; + } + + /** + * Find all roles directly participated in by a given principal. This builds the role cache + * because it assumes that subsequent calls may be made to find roles participated in indirectly. + * @param name username or role name + * @param type user or role + * @return map of role name to grant info for all roles directly participated in. + */ + Map<String, GrantInfoWritable> getPrincipalDirectRoles(String name, PrincipalType type) + throws IOException { + buildRoleCache(); + + Map<String, GrantInfoWritable> directRoles = new HashMap<String, GrantInfoWritable>(); + for (Map.Entry<String, GrantInfoList> e : roleCache.entrySet()) { + for (GrantInfoWritable giw : e.getValue().grantInfos) { + if (giw.principalType == type && giw.principalName.equals(name)) { + directRoles.put(e.getKey(), giw); + break; + } + } + } + return directRoles; + } + + /** + * Fetch all roles and users included directly in a given role. + * @param roleName name of the role + * @return a list of all principals included directly in this role + * @throws IOException + */ + GrantInfoList getRolePrincipals(String roleName) throws IOException, NoSuchObjectException { + GrantInfoList rolePrincipals = roleCache.get(roleName); + if (rolePrincipals != null) return rolePrincipals; + byte[] key = HBaseUtils.buildKey(roleName); + byte[] serialized = read(ROLE_TABLE, key, CATALOG_CF, ROLES_COL); + if (serialized == null) return null; + rolePrincipals = new GrantInfoList(); + HBaseUtils.deserialize(rolePrincipals, serialized); + roleCache.put(roleName, rolePrincipals); + return rolePrincipals; + } + + /** + * Given a role, find all users who directly or indirectly participate in it. This is + * expensive and should be used sparingly. It scans the entire userToRole table and + * does a linear search on each entry.
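+ * It relies on the USER_TO_ROLE rows kept current by buildRoleMapForUser(), so both direct + * and transitive members are found.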
+ * @param roleName name of the role + * @return set of all users in the role + * @throws IOException + */ + Set<String> findAllUsersInRole(String roleName) throws IOException { + // Walk the userToRole table and collect every user that matches this role. + Set<String> users = new HashSet<String>(); + Iterator<Result> iter = scanWithFilter(USER_TO_ROLE_TABLE, null, CATALOG_CF, CATALOG_COL, null); + while (iter.hasNext()) { + RoleList roleList = new RoleList(); + Result result = iter.next(); + HBaseUtils.deserialize(roleList, result.getValue(CATALOG_CF, CATALOG_COL)); + for (String rn : roleList.roles) { + if (rn.equals(roleName)) { + users.add(new String(result.getRow(), HBaseUtils.ENCODING)); + break; + } + } + } + return users; + } + + /** + * Add a principal to a role. + * @param roleName name of the role to add principal to + * @param grantInfo grant information for this principal. + * @throws java.io.IOException + * @throws NoSuchObjectException + */ + void addPrincipalToRole(String roleName, GrantInfoWritable grantInfo) + throws IOException, NoSuchObjectException { + GrantInfoList rolePrincipals = getRolePrincipals(roleName); + if (rolePrincipals == null) { + // Happens the first time a principal is added to a role + rolePrincipals = new GrantInfoList(); + } + rolePrincipals.grantInfos.add(grantInfo); + byte[] key = HBaseUtils.buildKey(roleName); + byte[] serialized = HBaseUtils.serialize(rolePrincipals); + store(ROLE_TABLE, key, CATALOG_CF, ROLES_COL, serialized); + flush(); + roleCache.put(roleName, rolePrincipals); + } + + /** + * Drop a principal from a role. + * @param roleName Name of the role to drop the principal from + * @param principalName name of the principal to drop from the role + * @param type user or role + * @param grantOnly if this is true, just remove the grant option, don't actually remove the + * user from the role. + * @throws NoSuchObjectException + * @throws IOException + */ + void dropPrincipalFromRole(String roleName, String principalName, PrincipalType type, + boolean grantOnly) + throws NoSuchObjectException, IOException { + GrantInfoList rolePrincipals = getRolePrincipals(roleName); + if (rolePrincipals == null) { + // Means there aren't any principals in this role, so probably not a problem. + return; + } + for (int i = 0; i < rolePrincipals.grantInfos.size(); i++) { + if (rolePrincipals.grantInfos.get(i).principalType == type && + rolePrincipals.grantInfos.get(i).principalName.equals(principalName)) { + if (grantOnly) rolePrincipals.grantInfos.get(i).grantOption = false; + else rolePrincipals.grantInfos.remove(i); + break; + } + } + byte[] key = HBaseUtils.buildKey(roleName); + byte[] serialized = HBaseUtils.serialize(rolePrincipals); + store(ROLE_TABLE, key, CATALOG_CF, ROLES_COL, serialized); + flush(); + roleCache.put(roleName, rolePrincipals); + } + + /** + * Rebuild the row for a given user in the USER_TO_ROLE table. This is expensive. It + * should be called as infrequently as possible. + * @param userName name of the user + * @throws IOException + */ + void buildRoleMapForUser(String userName) throws IOException, NoSuchObjectException { + // This is mega ugly. Hopefully we don't have to do this too often. + // First, scan the role table and put it all in memory + buildRoleCache(); + LOG.debug("Building role map for " + userName); + + // Second, find every role the user participates in directly.
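+ // Worked example (cf. the userToRoleMap test): if wilma is a USER principal in role1's + // grant list and role2 is a ROLE principal in role1's grant list, this direct pass finds + // {role1} for wilma, and the indirect pass below then adds role2 through role1.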
+ Set<String> rolesToAdd = new HashSet<String>(); + Set<String> userSet = new HashSet<String>(); + Set<String> rolesToCheckNext = new HashSet<String>(); + userSet.add(userName); + for (Map.Entry<String, GrantInfoList> e : roleCache.entrySet()) { + for (GrantInfoWritable grantInfo : e.getValue().grantInfos) { + if (grantInfo.principalType == PrincipalType.USER && userName.equals(grantInfo.principalName)) { + rolesToAdd.add(e.getKey()); + rolesToCheckNext.add(e.getKey()); + LOG.debug("Adding " + e.getKey() + " to list of roles user is in directly"); + break; + } + } + } + + // Third, find every role the user participates in indirectly (that is, they have been + // granted into role X and role Y has been granted into role X). + while (rolesToCheckNext.size() > 0) { + Set<String> tmpRolesToCheckNext = new HashSet<String>(); + for (String roleName : rolesToCheckNext) { + GrantInfoList grantInfos = roleCache.get(roleName); + if (grantInfos == null) continue; // happens when a role contains no grants + for (GrantInfoWritable grantInfo : grantInfos.grantInfos) { + if (grantInfo.principalType == PrincipalType.ROLE && + rolesToAdd.add(grantInfo.principalName)) { + tmpRolesToCheckNext.add(grantInfo.principalName); + LOG.debug("Adding " + grantInfo.principalName + + " to list of roles user is in indirectly"); + } + } + } + rolesToCheckNext = tmpRolesToCheckNext; + } + + byte[] key = HBaseUtils.buildKey(userName); + byte[] serialized = HBaseUtils.serialize(new RoleList(new ArrayList<String>(rolesToAdd))); + store(USER_TO_ROLE_TABLE, key, CATALOG_CF, CATALOG_COL, serialized); + flush(); + } + + /** + * Remove all of the grants for a role. This is not cheap. + * @param roleName name of the role whose grants will be removed + * @throws IOException + */ + void removeRoleGrants(String roleName) throws IOException { + buildRoleCache(); + + List<Put> puts = new ArrayList<Put>(); + // First, walk the role table and remove any references to this role + for (Map.Entry<String, GrantInfoList> e : roleCache.entrySet()) { + boolean madeAChange = false; + for (int i = 0; i < e.getValue().grantInfos.size(); i++) { + if (e.getValue().grantInfos.get(i).principalType == PrincipalType.ROLE && + e.getValue().grantInfos.get(i).principalName.equals(roleName)) { + e.getValue().grantInfos.remove(i); + madeAChange = true; + break; + } + } + if (madeAChange) { + Put put = new Put(HBaseUtils.buildKey(e.getKey())); + put.add(CATALOG_CF, ROLES_COL, HBaseUtils.serialize(e.getValue())); + puts.add(put); + roleCache.put(e.getKey(), e.getValue()); + } + } + + if (puts.size() > 0) { + HTableInterface htab = getHTable(ROLE_TABLE); + htab.put(puts); + } + + // Remove any global privileges held by this role + PrincipalPrivilegeSet global = getGlobalPrivs(); + if (global != null && + global.getRolePrivileges() != null && + global.getRolePrivileges().remove(roleName) != null) { + putGlobalPrivs(global); + } + + // Now, walk the db table + puts.clear(); + List<Database> dbs = scanDatabases(null); + if (dbs == null) dbs = new ArrayList<Database>(); // rare, but can happen + for (Database db : dbs) { + if (db.getPrivileges() != null && + db.getPrivileges().getRolePrivileges() != null && + db.getPrivileges().getRolePrivileges().remove(roleName) != null) { + Put put = new Put(HBaseUtils.buildKey(db.getName())); + put.add(CATALOG_CF, CATALOG_COL, HBaseUtils.serialize(new DatabaseWritable(db))); + puts.add(put); + } + } + + if (puts.size() > 0) { + HTableInterface htab = getHTable(DB_TABLE); + htab.put(puts); + } + + // Finally, walk the table table + puts.clear(); + for (Database db : dbs) { + List<Table> tables = scanTables(db.getName(), null); + if (tables != null) { + for (Table table : tables) { + if (table.getPrivileges() != null && +
table.getPrivileges().getRolePrivileges() != null && + table.getPrivileges().getRolePrivileges().remove(roleName) != null) { + Put put = new Put(HBaseUtils.buildKey(table.getDbName(), table.getTableName())); + put.add(CATALOG_CF, CATALOG_COL, HBaseUtils.serialize(new TableWritable(table))); + puts.add(put); + } + } + } + } + + if (puts.size() > 0) { + HTableInterface htab = getHTable(TABLE_TABLE); + htab.put(puts); + } + + flush(); + } + /** * Fetch a role * @param roleName name of the role @@ -535,8 +919,48 @@ void deleteRole(String roleName) throws IOException { byte[] key = HBaseUtils.buildKey(roleName); delete(ROLE_TABLE, key, null, null); flush(); + roleCache.remove(roleName); + } + + private static class RoleList implements Writable { + List roles; + + RoleList() { + } + + RoleList(List r) { + roles = r; + } + + @Override + public void write(DataOutput out) throws IOException { + HBaseUtils.writeStrList(out, roles); + } + + @Override + public void readFields(DataInput in) throws IOException { + roles = HBaseUtils.readStrList(in); + } } + private void buildRoleCache() throws IOException { + if (!entireRoleTableInCache) { + Iterator roles = scanWithFilter(ROLE_TABLE, null, CATALOG_CF, ROLES_COL, null); + while (roles.hasNext()) { + Result res = roles.next(); + String roleName = new String(res.getRow(), HBaseUtils.ENCODING); + GrantInfoList grantInfos = new GrantInfoList(); + HBaseUtils.deserialize(grantInfos, res.getValue(CATALOG_CF, ROLES_COL)); + roleCache.put(roleName, grantInfos); + } + entireRoleTableInCache = true; + } + } + + /********************************************************************************************** + * Table related methods + *********************************************************************************************/ + /** * Fetch a table object * @param dbName database the table is in @@ -652,6 +1076,24 @@ void deleteTable(String dbName, String tableName) throws IOException { flush(); } + private Table getTable(String dbName, String tableName, boolean populateCache) + throws IOException { + ObjectPair hashKey = new ObjectPair(dbName, tableName); + Table cached = tableCache.get(hashKey); + if (cached != null) return cached; + byte[] key = HBaseUtils.buildKey(dbName, tableName); + byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL); + if (serialized == null) return null; + TableWritable table = new TableWritable(); + HBaseUtils.deserialize(table, serialized); + if (populateCache) tableCache.put(hashKey, table.table); + return table.table; + } + + /********************************************************************************************** + * StorageDescriptor related methods + *********************************************************************************************/ + /** * If this serde has already been read, then return it from the cache. If not, read it, then * return it. 
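The countStorageDescriptor assertions in TestStorageDescriptorSharing above, together with REF_COUNT_COL and the MessageDigest import in this file, indicate that identical storage descriptors are stored once under a content hash and reference-counted. A minimal standalone sketch of that scheme — the SdSharingSketch class, its in-memory maps, and the MD5 choice are illustrative assumptions, not the patch's actual code:

import java.security.MessageDigest;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

// Illustrative only: content-addressed storage descriptors with ref counting.
class SdSharingSketch {
  private final Map<String, byte[]> sdByHash = new HashMap<String, byte[]>();
  private final Map<String, Integer> refCount = new HashMap<String, Integer>();

  // Store a serialized SD, returning its key; identical SDs share one entry.
  String store(byte[] serializedSd) throws Exception {
    String key = Arrays.toString(MessageDigest.getInstance("MD5").digest(serializedSd));
    Integer refs = refCount.get(key);
    if (refs == null) {           // first copy: write the bytes and start the counter
      sdByHash.put(key, serializedSd);
      refCount.put(key, 1);
    } else {                      // duplicate: only bump the reference count
      refCount.put(key, refs + 1);
    }
    return key;
  }

  // Drop one reference; delete the SD when the last reference goes away.
  void release(String key) {
    int refs = refCount.get(key) - 1;
    if (refs == 0) {
      refCount.remove(key);
      sdByHash.remove(key);
    } else {
      refCount.put(key, refs);
    }
  }
}

Under such a scheme the createManyPartitions test sees one stored descriptor for five partitions sharing a layout, and a second only after creating a table with a different input format.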
@@ -741,6 +1183,32 @@ void decrementStorageDescriptorRefCount(StorageDescriptor sd) throws IOException throw new IOException("Too many unsuccessful attempts to increment storage counter"); } + private static class ByteArrayWrapper { + byte[] wrapped; + + ByteArrayWrapper(byte[] b) { + wrapped = b; + } + + @Override + public boolean equals(Object other) { + if (other instanceof ByteArrayWrapper) { + return Arrays.equals(((ByteArrayWrapper)other).wrapped, wrapped); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Arrays.hashCode(wrapped); + } + } + + /********************************************************************************************** + * Statistics related methods + *********************************************************************************************/ + /** * Update statistics for one or more columns for a table or a partition. * @param dbName database the table is in @@ -915,6 +1383,34 @@ ColumnStatistics getTableStatistics(String dbName, String tableName, List<String> + private static class PartStatsInfo { + ColumnStatistics stats; + String partName; + List<String> colNames; + List<String> partVals; + byte[][] colKeys; + + PartStatsInfo(ColumnStatistics s, List<String> pv, String pn) { + stats = s; partVals = pv; partName = pn; + colNames = new ArrayList<String>(); + colKeys = null; + } + } + + private byte[] getStatisticsKey(String dbName, String tableName, List<String> partVals) { + return partVals == null ? + HBaseUtils.buildKey(dbName, tableName) : + buildPartitionKey(dbName, tableName, partVals); + } + + private String getStatisticsTable(List<String> partVals) { + return partVals == null ? TABLE_TABLE : PART_TABLE; + } + + /********************************************************************************************** + * Cache methods + *********************************************************************************************/ + /** * This should be called whenever a new query is started.
*/ @@ -926,42 +1422,17 @@ void flushCatalogCache() { tableCache.flush(); sdCache.flush(); partCache.flush(); + flushRoleCache(); } - @VisibleForTesting - int countStorageDescriptor() throws IOException { - ResultScanner scanner = getHTable(SD_TABLE).getScanner(new Scan()); - int cnt = 0; - while (scanner.next() != null) cnt++; - return cnt; + private void flushRoleCache() { + roleCache.clear(); + entireRoleTableInCache = false; } - private Table getTable(String dbName, String tableName, boolean populateCache) - throws IOException { - ObjectPair<String, String> hashKey = new ObjectPair<String, String>(dbName, tableName); - Table cached = tableCache.get(hashKey); - if (cached != null) return cached; - byte[] key = HBaseUtils.buildKey(dbName, tableName); - byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL); - if (serialized == null) return null; - TableWritable table = new TableWritable(); - HBaseUtils.deserialize(table, serialized); - if (populateCache) tableCache.put(hashKey, table.table); - return table.table; - } - - private Partition getPartition(String dbName, String tableName, List<String> partVals, - boolean populateCache) throws IOException { - Partition cached = partCache.get(dbName, tableName, partVals); - if (cached != null) return cached; - byte[] key = buildPartitionKey(dbName, tableName, partVals); - byte[] serialized = read(PART_TABLE, key, CATALOG_CF, CATALOG_COL); - if (serialized == null) return null; - PartitionWritable part = new PartitionWritable(); - HBaseUtils.deserialize(part, serialized); - if (populateCache) partCache.put(dbName, tableName, part.part); - return part.part; - } + /********************************************************************************************** + * General access methods + *********************************************************************************************/ private void store(String table, byte[] key, byte[] colFam, byte[] colName, byte[] obj) throws IOException { @@ -1008,26 +1479,6 @@ private void delete(String table, byte[] key, byte[] colFam, byte[] colName) thr htab.delete(d); } - private List<Partition> scanPartitions(byte[] keyPrefix, byte[] colFam, byte[] colName, - int maxResults) throws IOException { - return scanPartitionsWithFilter(keyPrefix, colFam, colName, maxResults, null); - } - - private List<Partition> scanPartitionsWithFilter(byte[] keyPrefix, byte[] colFam, byte[] colName, - int maxResults, Filter filter) - throws IOException { - Iterator<Result> iter = - scanWithFilter(PART_TABLE, keyPrefix, colFam, colName, filter); - List<Partition> parts = new ArrayList<Partition>(); - int numToFetch = maxResults < 0 ?
Integer.MAX_VALUE : maxResults; - for (int i = 0; i < numToFetch && iter.hasNext(); i++) { - PartitionWritable p = new PartitionWritable(); - HBaseUtils.deserialize(p, iter.next().getValue(colFam, colName)); - parts.add(p.part); - } - return parts; - } - private Iterator<Result> scanWithFilter(String table, byte[] keyPrefix, byte[] colFam, byte[] colName, Filter filter) throws IOException { HTableInterface htab = getHTable(table); @@ -1069,33 +1520,21 @@ private void flush() throws IOException { for (HTableInterface htab : tables.values()) htab.flushCommits(); } - private byte[] buildPartitionKey(String dbName, String tableName, List<String> partVals) { - Deque<String> keyParts = new ArrayDeque<String>(partVals); - keyParts.addFirst(tableName); - keyParts.addFirst(dbName); - return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()])); - } - - private byte[] buildPartitionKey(PartitionWritable part) throws IOException { - Deque<String> keyParts = new ArrayDeque<String>(part.part.getValues()); - keyParts.addFirst(part.part.getTableName()); - keyParts.addFirst(part.part.getDbName()); - return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()])); - } - private byte[] hash(byte[] serialized) throws IOException { md.update(serialized); return md.digest(); } - private byte[] getStatisticsKey(String dbName, String tableName, List<String> partVals) { - return partVals == null ? - HBaseUtils.buildKey(dbName, tableName) : - buildPartitionKey(dbName, tableName, partVals); - } + /********************************************************************************************** + * Testing methods and classes + *********************************************************************************************/ - private String getStatisticsTable(List<String> partVals) { - return partVals == null ? TABLE_TABLE : PART_TABLE; + @VisibleForTesting + int countStorageDescriptor() throws IOException { + ResultScanner scanner = getHTable(SD_TABLE).getScanner(new Scan()); + int cnt = 0; + while (scanner.next() != null) cnt++; + return cnt; } /** @@ -1107,42 +1546,6 @@ void setConnection(HConnection connection) { conn = connection; } - private static class ByteArrayWrapper { - byte[] wrapped; - - ByteArrayWrapper(byte[] b) { - wrapped = b; - } - - @Override - public boolean equals(Object other) { - if (other instanceof ByteArrayWrapper) { - return Arrays.equals(((ByteArrayWrapper)other).wrapped, wrapped); - } else { - return false; - } - } - - @Override - public int hashCode() { - return Arrays.hashCode(wrapped); - } - } - - private static class PartStatsInfo { - ColumnStatistics stats; - String partName; - List<String> colNames; - List<String> partVals; - byte[][] colKeys; - - PartStatsInfo(ColumnStatistics s, List<String> pv, String pn) { - stats = s; partVals = pv; partName = pn; - colNames = new ArrayList<String>(); - colKeys = null; - } - } - // For testing without the cache private static class BogusObjectCache extends ObjectCache { static Counter bogus = new Counter("bogus"); diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java index 0695dd2..a2a3ddc 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java @@ -18,11 +18,13 @@ */ package org.apache.hadoop.hive.metastore.hbase; +import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.RawStore; import org.apache.hadoop.hive.metastore.api.AggrStats; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; @@ -45,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; @@ -52,10 +55,13 @@ import org.apache.hadoop.hive.metastore.api.UnknownPartitionException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.metastore.model.MDBPrivilege; +import org.apache.hadoop.hive.metastore.model.MDatabase; import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege; import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege; import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege; +import org.apache.hadoop.hive.metastore.model.MRole; import org.apache.hadoop.hive.metastore.model.MRoleMap; +import org.apache.hadoop.hive.metastore.model.MTable; import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege; import org.apache.hadoop.hive.metastore.model.MTablePrivilege; import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy; @@ -63,8 +69,11 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; /** * Implementation of RawStore that stores data in HBase */ @@ -114,7 +123,6 @@ public void createDatabase(Database db) throws InvalidObjectException, MetaExcep // HiveMetaStore already checks for existence of the database, don't recheck getHBase().putDb(db); } catch (IOException e) { - // TODO NOt sure what i should throw here LOG.error("Unable to create database ", e); throw new MetaException("Unable to read from or write to hbase " + e.getMessage()); } @@ -199,7 +207,6 @@ public void createTable(Table tbl) throws InvalidObjectException, MetaException try { getHBase().putTable(tbl); } catch (IOException e) { - // TODO NOt sure what i should throw here LOG.error("Unable to create table ", e); throw new MetaException("Unable to read from or write to hbase " + e.getMessage()); } @@ -486,6 +493,40 @@ public boolean isPartitionMarkedForEvent(String dbName, String tblName, throw new UnsupportedOperationException(); } + /* + * The design for roles. Roles are a pain because of their hierarchical nature. When a user + * comes in, we need to be able to determine all roles he is a part of, and we do not want to + * have to walk the hierarchy in the database. This means we need to flatten the role map for + * each user. But we also have to track how the roles are connected for each user, in case one + * role is revoked from another (e.g. if role1 is included in role2 but then revoked + * from it, and user1 was granted both role2 and role1, we cannot remove user1 from role1 + * because he was granted that separately). + * + * We want to optimize for the read case and put the cost on grant and revoke of roles, since + * we assume that is the less common case.
So we lay out the roles data as follows: + * + * There is a ROLES table that records each role, plus what other principals have been granted + * into it, along with the info on grantor, etc. + * + * There is a USER_TO_ROLES table that contains the mapping of each user to every role he is a + * part of. + * + * This makes determining what roles a user participates in very quick, as USER_TO_ROLE is a + * simple list for each user. It makes granting users into roles expensive, and granting roles + * into roles very expensive. Each time a user is granted into a role, we need to walk the + * hierarchy in the role table (which means moving through that table multiple times) to + * determine every role the user participates in. Each time a role is granted into another role, + * this hierarchical walk must be done for every principal in the role being granted into. To + * mitigate this pain somewhat, whenever doing these mappings we cache the entire ROLES table in + * memory, since we assume it is not large. + * + * On a related note, whenever a role is dropped we must walk not only all these role tables + * above (equivalent to a role being revoked from another role, since we have to rebuild the + * mappings for any users in roles that contained that role and any users directly in that + * role), but we also have to remove all the privileges associated with that role directly. + * That means a walk of the DBS table and of the TBLS table. + */ + @Override public boolean addRole(String roleName, String ownerName) throws InvalidObjectException, MetaException, NoSuchObjectException { @@ -498,7 +539,6 @@ public boolean addRole(String roleName, String ownerName) throws InvalidObjectEx getHBase().putRole(role); return true; } catch (IOException e) { - // TODO NOt sure what i should throw here LOG.error("Unable to create role ", e); throw new MetaException("Unable to read from or write to hbase " + e.getMessage()); } @@ -507,7 +547,12 @@ public boolean addRole(String roleName, String ownerName) throws InvalidObjectEx @Override public boolean removeRole(String roleName) throws MetaException, NoSuchObjectException { try { + Set<String> usersInRole = getHBase().findAllUsersInRole(roleName); getHBase().deleteRole(roleName); + getHBase().removeRoleGrants(roleName); + for (String user : usersInRole) { + getHBase().buildRoleMapForUser(user); + } return true; } catch (IOException e) { LOG.error("Unable to delete role" + e); @@ -517,35 +562,131 @@ public boolean removeRole(String roleName) throws MetaException, NoSuchObjectExc @Override public boolean grantRole(Role role, String userName, PrincipalType principalType, String grantor, - PrincipalType grantorType, boolean grantOption) throws MetaException, - NoSuchObjectException, InvalidObjectException { - throw new UnsupportedOperationException(); + PrincipalType grantorType, boolean grantOption) + throws MetaException, NoSuchObjectException, InvalidObjectException { + try { + Set<String> usersToRemap = findUsersToRemapRolesFor(role, userName, principalType); + getHBase().addPrincipalToRole(role.getRoleName(), + new GrantInfoWritable(userName, principalType, (int)(System.currentTimeMillis() / 1000), + grantor, grantorType, grantOption)); + for (String user : usersToRemap) { + getHBase().buildRoleMapForUser(user); + } + return true; + } catch (IOException e) { + LOG.error("Unable to grant role", e); + throw new MetaException("Unable to grant role " + e.getMessage()); + } + }
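A small worked example of the layout described in the comment above (all names invented): after fred is granted into role2, and role1 is then granted into role2, the two tables hold roughly

    ROLES:         role1 -> []
                   role2 -> [USER:fred + grant info, ROLE:role1 + grant info]
    USER_TO_ROLE:  fred  -> [role2, role1]

A lookup for fred reads a single USER_TO_ROLE row; it was the grant of role1 into role2 that paid the cost of the hierarchy walk and appended role1 to fred's flattened list.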
@Override public boolean revokeRole(Role role, String userName, PrincipalType principalType, boolean grantOption) throws MetaException, NoSuchObjectException { - throw new UnsupportedOperationException(); + // This can have a couple of different meanings. If grantOption is true, then this is only + // revoking the grant option, the role itself doesn't need to be removed. If it is false + // then we need to remove the userName from the role altogether. + try { + if (grantOption) { + // If this is a grant only change, we don't need to rebuild the user mappings. + getHBase().dropPrincipalFromRole(role.getRoleName(), userName, principalType, grantOption); + } else { + Set<String> usersToRemap = findUsersToRemapRolesFor(role, userName, principalType); + getHBase().dropPrincipalFromRole(role.getRoleName(), userName, principalType, grantOption); + for (String user : usersToRemap) { + getHBase().buildRoleMapForUser(user); + } + } + return true; + } catch (IOException e) { + LOG.error("Unable to revoke role " + role.getRoleName() + " from " + userName, e); + throw new MetaException("Unable to revoke role " + e.getMessage()); + } } @Override - public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, List<String> groupNames) throws - InvalidObjectException, MetaException { - throw new UnsupportedOperationException(); + public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, List<String> groupNames) + throws InvalidObjectException, MetaException { + try { + PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet(); + PrincipalPrivilegeSet global = getHBase().getGlobalPrivs(); + if (global == null) return null; + List<PrivilegeGrantInfo> pgi = global.getUserPrivileges().get(userName); + if (pgi != null) { + pps.putToUserPrivileges(userName, pgi); + } + + List<String> roles = getHBase().getUserRoles(userName); + if (roles != null) { + for (String role : roles) { + pgi = global.getRolePrivileges().get(role); + if (pgi != null) { + pps.putToRolePrivileges(role, pgi); + } + } + } + return pps; + } catch (IOException e) { + LOG.error("Unable to get user privileges", e); + throw new MetaException("Unable to get user privileges, " + e.getMessage()); + } } @Override public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, String userName, - List<String> groupNames) throws - InvalidObjectException, MetaException { - throw new UnsupportedOperationException(); + List<String> groupNames) + throws InvalidObjectException, MetaException { + try { + PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet(); + Database db = getHBase().getDb(dbName); + // Find the user privileges for this db + List<PrivilegeGrantInfo> pgi = db.getPrivileges().getUserPrivileges().get(userName); + if (pgi != null) { + pps.putToUserPrivileges(userName, pgi); + } + + List<String> roles = getHBase().getUserRoles(userName); + if (roles != null) { + for (String role : roles) { + pgi = db.getPrivileges().getRolePrivileges().get(role); + if (pgi != null) { + pps.putToRolePrivileges(role, pgi); + } + } + } + return pps; + } catch (IOException e) { + LOG.error("Unable to get db privileges for user", e); + throw new MetaException("Unable to get db privileges for user, " + e.getMessage()); + } }
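The shape of the returned set is easiest to see with an example (names and privileges invented): if fred holds a direct grant of select on db1 and participates in role1, which holds create on db1, then a sketch of the call and result is

    PrincipalPrivilegeSet pps = store.getDBPrivilegeSet("db1", "fred", null);
    // pps.getUserPrivileges() -> { "fred"  : [grant info for select] }
    // pps.getRolePrivileges() -> { "role1" : [grant info for create] }

so the caller still sees which grants arrived via which principal rather than one merged list.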
@Override public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, String tableName, - String userName, List<String> groupNames) throws - InvalidObjectException, MetaException { - throw new UnsupportedOperationException(); + String userName, List<String> groupNames) + throws InvalidObjectException, MetaException { + try { + PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet(); + Table table = getHBase().getTable(dbName, tableName); + // Find the user privileges for this table + List<PrivilegeGrantInfo> pgi = table.getPrivileges().getUserPrivileges().get(userName); + if (pgi != null) { + pps.putToUserPrivileges(userName, pgi); + } + + List<String> roles = getHBase().getUserRoles(userName); + if (roles != null) { + for (String role : roles) { + pgi = table.getPrivileges().getRolePrivileges().get(role); + if (pgi != null) { + pps.putToRolePrivileges(role, pgi); + } + } + } + return pps; + } catch (IOException e) { + LOG.error("Unable to get table privileges for user", e); + throw new MetaException("Unable to get table privileges for user, " + e.getMessage()); + } } @Override @@ -553,7 +694,8 @@ public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, String tabl String partition, String userName, List<String> groupNames) throws InvalidObjectException, MetaException { - throw new UnsupportedOperationException(); + // We don't support partition privileges + return null; } @Override @@ -562,25 +704,111 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, String tableNa String userName, List<String> groupNames) throws InvalidObjectException, MetaException { - throw new UnsupportedOperationException(); + // We don't support column level privileges + return null; } + // TODO - we need to rework these listAll methods so they don't leak the M classes. Those are + // an artifact of the ObjectStore and don't belong in the RawStore interface. @Override public List<MGlobalPrivilege> listPrincipalGlobalGrants(String principalName, PrincipalType principalType) { - throw new UnsupportedOperationException(); + List<PrivilegeGrantInfo> grants; + try { + switch (principalType) { + case USER: + grants = getHBase().getGlobalPrivs().getUserPrivileges().get(principalName); + break; + + case ROLE: + grants = getHBase().getGlobalPrivs().getRolePrivileges().get(principalName); + break; + + default: + throw new RuntimeException("Unknown or unsupported principal type " + + principalType.toString()); + } + + if (grants == null || grants.size() == 0) return null; + List<MGlobalPrivilege> privileges = new ArrayList<MGlobalPrivilege>(grants.size()); + for (PrivilegeGrantInfo pgi : grants) { + privileges.add(new MGlobalPrivilege(principalName, principalType.toString(), + pgi.getPrivilege(), pgi.getCreateTime(), pgi.getGrantor(), + pgi.getGrantorType().toString(), pgi.isGrantOption())); + } + return privileges; + } catch (IOException e) { + throw new RuntimeException(e); + } } @Override public List<MDBPrivilege> listPrincipalDBGrants(String principalName, PrincipalType principalType, String dbName) { - throw new UnsupportedOperationException(); + List<PrivilegeGrantInfo> grants; + try { + Database db = getHBase().getDb(dbName); + switch (principalType) { + case USER: + grants = db.getPrivileges().getUserPrivileges().get(principalName); + break; + + case ROLE: + grants = db.getPrivileges().getRolePrivileges().get(principalName); + break; + + default: + throw new RuntimeException("Unknown or unsupported principal type " + + principalType.toString()); + } + + if (grants == null || grants.size() == 0) return null; + MDatabase mdb = new MDatabase(db.getName(), db.getLocationUri(), db.getDescription(), + db.getParameters()); + List<MDBPrivilege> privileges = new ArrayList<MDBPrivilege>(grants.size()); + for (PrivilegeGrantInfo pgi : grants) { + privileges.add(new MDBPrivilege(principalName, principalType.toString(), mdb, + pgi.getPrivilege(), pgi.getCreateTime(), pgi.getGrantor(), + pgi.getGrantorType().toString(), pgi.isGrantOption())); + } + return privileges; + } catch (IOException e) { + throw new RuntimeException(e); + } }
@Override public List<MTablePrivilege> listAllTableGrants(String principalName, PrincipalType principalType, String dbName, String tableName) { - throw new UnsupportedOperationException(); + List<PrivilegeGrantInfo> grants; + try { + Table table = getHBase().getTable(dbName, tableName); + switch (principalType) { + case USER: + grants = table.getPrivileges().getUserPrivileges().get(principalName); + break; + + case ROLE: + grants = table.getPrivileges().getRolePrivileges().get(principalName); + break; + + default: + throw new RuntimeException("Unknown or unsupported principal type " + + principalType.toString()); + } + + if (grants == null || grants.size() == 0) return null; + MTable mtable = null; + List<MTablePrivilege> privileges = new ArrayList<MTablePrivilege>(grants.size()); + for (PrivilegeGrantInfo pgi : grants) { + privileges.add(new MTablePrivilege(principalName, principalType.toString(), mtable, + pgi.getPrivilege(), pgi.getCreateTime(), pgi.getGrantor(), + pgi.getGrantorType().toString(), pgi.isGrantOption())); + } + return privileges; + } catch (IOException e) { + throw new RuntimeException(e); + } } @Override @@ -588,7 +816,8 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, String tableNa PrincipalType principalType, String dbName, String tableName, String partName) { - throw new UnsupportedOperationException(); + // We don't support partition grants + return null; } @Override @@ -596,7 +825,8 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, String tableNa PrincipalType principalType, String dbName, String tableName, String columnName) { - throw new UnsupportedOperationException(); + // We don't support column grants + return null; } @Override @@ -606,20 +836,165 @@ public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, String tableNa String tableName, String partName, String columnName) { - throw new UnsupportedOperationException(); + // We don't support column grants + return null; } @Override - public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectException, - MetaException, NoSuchObjectException { - // TODO + public boolean grantPrivileges(PrivilegeBag privileges) + throws InvalidObjectException, MetaException, NoSuchObjectException { + for (HiveObjectPrivilege priv : privileges.getPrivileges()) { + // Locate the right object to deal with + PrivilegeInfo privilegeInfo = findPrivilegeToGrantOrRevoke(priv); + + // Now, let's see if we've already got this privilege + for (PrivilegeGrantInfo info : privilegeInfo.grants) { + if (info.getPrivilege().equals(priv.getGrantInfo().getPrivilege())) { + throw new InvalidObjectException(priv.getPrincipalName() + " already has " + + priv.getGrantInfo().getPrivilege() + " on " + privilegeInfo.typeErrMsg); + } + } + privilegeInfo.grants.add(priv.getGrantInfo()); + + writeBackGrantOrRevoke(priv, privilegeInfo); + } return true; } @Override public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption) throws InvalidObjectException, MetaException, NoSuchObjectException { - throw new UnsupportedOperationException(); + for (HiveObjectPrivilege priv : privileges.getPrivileges()) { + PrivilegeInfo privilegeInfo = findPrivilegeToGrantOrRevoke(priv); + + for (int i = 0; i < privilegeInfo.grants.size(); i++) { + if (privilegeInfo.grants.get(i).getPrivilege().equals(priv.getGrantInfo().getPrivilege())) { + if (grantOption) privilegeInfo.grants.get(i).setGrantOption(false); + else privilegeInfo.grants.remove(i); + break; + } + } + writeBackGrantOrRevoke(priv, privilegeInfo); + } + return true; + } + + private static class PrivilegeInfo { + Database db; + Table table; + List<PrivilegeGrantInfo> grants; + String typeErrMsg; + PrincipalPrivilegeSet privSet; + }
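Note the two meanings of grantOption in revokePrivileges above; a sketch of the difference, assuming a PrivilegeBag built the way the example after the next method shows:

    store.revokePrivileges(bag, true);   // keep the grant, only clear its grant-option flag
    store.revokePrivileges(bag, false);  // remove the matching grant entirely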
+ private PrivilegeInfo findPrivilegeToGrantOrRevoke(HiveObjectPrivilege privilege) + throws MetaException, NoSuchObjectException, InvalidObjectException { + PrivilegeInfo result = new PrivilegeInfo(); + switch (privilege.getHiveObject().getObjectType()) { + case GLOBAL: + try { + result.privSet = createOnNull(getHBase().getGlobalPrivs()); + } catch (IOException e) { + LOG.error("Unable to fetch global privileges", e); + throw new MetaException("Unable to fetch global privileges, " + e.getMessage()); + } + result.typeErrMsg = "global"; + break; + + case DATABASE: + result.db = getDatabase(privilege.getHiveObject().getDbName()); + result.typeErrMsg = "database " + result.db.getName(); + result.privSet = createOnNull(result.db.getPrivileges()); + break; + + case TABLE: + result.table = getTable(privilege.getHiveObject().getDbName(), + privilege.getHiveObject().getObjectName()); + result.typeErrMsg = "table " + result.table.getTableName(); + result.privSet = createOnNull(result.table.getPrivileges()); + break; + + case PARTITION: + case COLUMN: + throw new RuntimeException("HBase metastore does not support partition or column " + + "permissions"); + + default: + throw new RuntimeException("Woah bad, unknown object type " + + privilege.getHiveObject().getObjectType()); + } + + // Locate the right PrivilegeGrantInfo map; leave typeErrMsg describing the object so + // grant error messages stay meaningful. + Map<String, List<PrivilegeGrantInfo>> grantInfos; + switch (privilege.getPrincipalType()) { + case USER: + grantInfos = result.privSet.getUserPrivileges(); + break; + + case GROUP: + throw new RuntimeException("HBase metastore does not support group permissions"); + + case ROLE: + grantInfos = result.privSet.getRolePrivileges(); + break; + + default: + throw new RuntimeException("Woah bad, unknown principal type " + + privilege.getPrincipalType()); + } + + // Find the requested name in the grantInfo + result.grants = grantInfos.get(privilege.getPrincipalName()); + if (result.grants == null) { + // Means we don't have any grants for this principal yet. + result.grants = new ArrayList<PrivilegeGrantInfo>(); + grantInfos.put(privilege.getPrincipalName(), result.grants); + } + return result; + }
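For reference, a sketch of the kind of argument findPrivilegeToGrantOrRevoke resolves; every value here is invented, but HiveObjectRef, HiveObjectPrivilege, PrivilegeGrantInfo, and PrivilegeBag are the thrift-generated metastore API classes already imported above:

    HiveObjectRef ref = new HiveObjectRef(HiveObjectType.TABLE, "db1", "tab1", null, null);
    PrivilegeGrantInfo info = new PrivilegeGrantInfo("select",
        (int) (System.currentTimeMillis() / 1000), "admin", PrincipalType.USER, false);
    PrivilegeBag bag = new PrivilegeBag();
    bag.addToPrivileges(new HiveObjectPrivilege(ref, "fred", PrincipalType.USER, info));
    store.grantPrivileges(bag);          // fred gains select on table db1.tab1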
+ + private PrincipalPrivilegeSet createOnNull(PrincipalPrivilegeSet pps) { + // If this is the first time a user has been granted a privilege, the set will be null. + if (pps == null) { + pps = new PrincipalPrivilegeSet(); + } + if (pps.getUserPrivileges() == null) { + pps.setUserPrivileges(new HashMap<String, List<PrivilegeGrantInfo>>()); + } + if (pps.getRolePrivileges() == null) { + pps.setRolePrivileges(new HashMap<String, List<PrivilegeGrantInfo>>()); + } + return pps; + } + + private void writeBackGrantOrRevoke(HiveObjectPrivilege priv, PrivilegeInfo pi) + throws MetaException, NoSuchObjectException, InvalidObjectException { + // Now write it back + switch (priv.getHiveObject().getObjectType()) { + case GLOBAL: + try { + getHBase().putGlobalPrivs(pi.privSet); + } catch (IOException e) { + LOG.error("Unable to write global privileges", e); + throw new MetaException("Unable to write global privileges, " + e.getMessage()); + } + break; + + case DATABASE: + pi.db.setPrivileges(pi.privSet); + alterDatabase(pi.db.getName(), pi.db); + break; + + case TABLE: + pi.table.setPrivileges(pi.privSet); + alterTable(pi.table.getDbName(), pi.table.getTableName(), pi.table); + break; + + default: + throw new RuntimeException("Dude, you missed the second switch!"); + } } @Override @@ -650,12 +1025,43 @@ public Role getRole(String roleName) throws NoSuchObjectException { @Override public List<MRoleMap> listRoles(String principalName, PrincipalType principalType) { - throw new UnsupportedOperationException(); + List<MRoleMap> maps = new ArrayList<MRoleMap>(); + try { + Map<String, GrantInfoWritable> roles = + getHBase().getPrincipalDirectRoles(principalName, principalType); + for (Map.Entry<String, GrantInfoWritable> e : roles.entrySet()) { + // TODO - change GrantInfoWritable to contain create time and owner of granted role + maps.add(new MRoleMap(principalName, principalType.toString(), + new MRole(e.getKey(), 0, null), e.getValue().addTime, e.getValue().grantor, + e.getValue().grantorType.toString(), e.getValue().grantOption)); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + // Add the public role if this is a user + if (principalType == PrincipalType.USER) { + maps.add(new MRoleMap(principalName, principalType.toString(), + new MRole(HiveMetaStore.PUBLIC, 0, null), 0, null, null, false)); + } + return maps; + } @Override public List<MRoleMap> listRoleMembers(String roleName) { - throw new UnsupportedOperationException(); + try { + GrantInfoList gil = getHBase().getRolePrincipals(roleName); + List<MRoleMap> roleMaps = new ArrayList<MRoleMap>(gil.grantInfos.size()); + for (GrantInfoWritable giw : gil.grantInfos) { + // TODO - change GrantInfoWritable to contain create time and owner of granted role + roleMaps.add(new MRoleMap(giw.principalName, giw.principalType.toString(), + new MRole(roleName, 0, null), giw.addTime, giw.grantor, giw.grantorType.toString(), + giw.grantOption)); + } + return roleMaps; + } catch (Exception e) { + throw new RuntimeException(e); + } } @Override @@ -1010,6 +1416,29 @@ private String buildExternalPartName(String dbName, String tableName, List<Stri + private Set<String> findUsersToRemapRolesFor(Role role, String principalName, PrincipalType type) + throws IOException, NoSuchObjectException { + Set<String> usersToRemap; + switch (type) { + case USER: + // In this case it's just the user being added to the role that we need to remap for. + usersToRemap = new HashSet<String>(); + usersToRemap.add(principalName); + break; + + case ROLE: + // In this case we need to remap for all users in the containing role (not the role being + // granted into the containing role). + usersToRemap = getHBase().findAllUsersInRole(role.getRoleName()); + break; + + default: + throw new RuntimeException("Unknown principal type " + type); + + } + return usersToRemap; + } + /** * Build a partition name for external use.
Necessary since HBase itself doesn't store * partition names. diff --git metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java index efe98ea..b7aa01c 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java @@ -40,6 +40,7 @@ import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.StringColumnStatsData; import org.apache.hadoop.io.Writable; +import org.apache.thrift.TEnum; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -62,6 +63,7 @@ final static Charset ENCODING = StandardCharsets.UTF_8; final static char KEY_SEPARATOR = ':'; + final static String KEY_SEPARATOR_STR = new String(new char[] {KEY_SEPARATOR}); static final private Log LOG = LogFactory.getLog(HBaseUtils.class.getName()); @@ -330,12 +332,17 @@ static void writeOrderList(DataOutput out, List<Order> orderList) throws IOExcep } } + static PrincipalPrivilegeSet readPrivileges(byte[] bytes) throws IOException { + DataInput in = new DataInputStream(new ByteArrayInputStream(bytes)); + return readPrivileges(in); + } + static PrincipalPrivilegeSet readPrivileges(DataInput in) throws IOException { if (in.readBoolean()) { PrincipalPrivilegeSet pps = new PrincipalPrivilegeSet(); pps.setUserPrivileges(readPrivilege(in)); - pps.setGroupPrivileges(readPrivilege(in)); pps.setRolePrivileges(readPrivilege(in)); + // we ignore group privileges because we don't support old auth return pps; } else { return new PrincipalPrivilegeSet(); @@ -354,31 +361,37 @@ static PrincipalPrivilegeSet readPrivileges(DataInput in) throws IOException { for (int i = 0; i < sz; i++) { String key = readStr(in); int numGrants = in.readInt(); - if (numGrants == 0) { - priv.put(key, new ArrayList<PrivilegeGrantInfo>()); - } else { - for (int j = 0; j < numGrants; j++) { - PrivilegeGrantInfo pgi = new PrivilegeGrantInfo(); - pgi.setPrivilege(readStr(in)); - pgi.setCreateTime(in.readInt()); - pgi.setGrantor(readStr(in)); - pgi.setGrantorType(PrincipalType.findByValue(in.readInt())); - pgi.setGrantOption(in.readBoolean()); - } + List<PrivilegeGrantInfo> grants = new ArrayList<PrivilegeGrantInfo>(numGrants); + priv.put(key, grants); + for (int j = 0; j < numGrants; j++) { + PrivilegeGrantInfo pgi = new PrivilegeGrantInfo(); + pgi.setPrivilege(readStr(in)); + pgi.setCreateTime(in.readInt()); + pgi.setGrantor(readStr(in)); + pgi.setGrantorType(PrincipalType.findByValue(in.readInt())); + pgi.setGrantOption(in.readBoolean()); + grants.add(pgi); + } } return priv; } } + static byte[] writePrivileges(PrincipalPrivilegeSet privSet) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream dos = new DataOutputStream(baos); + writePrivileges(dos, privSet); + return baos.toByteArray(); + } + static void writePrivileges(DataOutput out, PrincipalPrivilegeSet privs) throws IOException { if (privs == null) { out.writeBoolean(false); } else { out.writeBoolean(true); writePrivilege(out, privs.getUserPrivileges()); - writePrivilege(out, privs.getGroupPrivileges()); writePrivilege(out, privs.getRolePrivileges()); + // we ignore group privileges because we don't support old auth } } @@ -407,7 +420,7 @@ private static void writePrivilege(DataOutput out, Map + pps.setUserPrivileges(new HashMap<String, List<PrivilegeGrantInfo>>()); + pps.setRolePrivileges(new HashMap<String, List<PrivilegeGrantInfo>>()); + + pps.getUserPrivileges().put("fred", Arrays.asList(new PrivilegeGrantInfo("read", 1, "daphne", + PrincipalType.USER, true)));
pps.getUserPrivileges().put("wilma", Arrays.asList(new PrivilegeGrantInfo("write", 1, + "scooby", PrincipalType.USER, false))); + pps.getRolePrivileges().put("role1", Arrays.asList(new PrivilegeGrantInfo("exec", 1, + "shaggy", PrincipalType.ROLE, true))); + + byte[] serialized = HBaseUtils.writePrivileges(pps); + pps = HBaseUtils.readPrivileges(serialized); + + Assert.assertEquals(2, pps.getUserPrivileges().size()); + Assert.assertEquals(1, pps.getUserPrivileges().get("fred").size()); + PrivilegeGrantInfo pgi = pps.getUserPrivileges().get("fred").get(0); + Assert.assertEquals("read", pgi.getPrivilege()); + Assert.assertEquals(1, pgi.getCreateTime()); + Assert.assertEquals("daphne", pgi.getGrantor()); + Assert.assertEquals(PrincipalType.USER, pgi.getGrantorType()); + Assert.assertTrue(pgi.isGrantOption()); + + Assert.assertEquals(1, pps.getUserPrivileges().get("wilma").size()); + pgi = pps.getUserPrivileges().get("wilma").get(0); + Assert.assertEquals("write", pgi.getPrivilege()); + Assert.assertEquals(1, pgi.getCreateTime()); + Assert.assertEquals("scooby", pgi.getGrantor()); + Assert.assertEquals(PrincipalType.USER, pgi.getGrantorType()); + Assert.assertFalse(pgi.isGrantOption()); + + Assert.assertEquals(1, pps.getRolePrivileges().size()); + Assert.assertEquals(1, pps.getRolePrivileges().get("role1").size()); + pgi = pps.getRolePrivileges().get("role1").get(0); + Assert.assertEquals("exec", pgi.getPrivilege()); + Assert.assertEquals(1, pgi.getCreateTime()); + Assert.assertEquals("shaggy", pgi.getGrantor()); + Assert.assertEquals(PrincipalType.ROLE, pgi.getGrantorType()); + Assert.assertTrue(pgi.isGrantOption()); + } +}
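A final note on the serialization change in HBaseUtils: group privileges are neither written nor read anymore, so they do not survive a round trip. A sketch of the consequence (assumed behavior, following the modified writePrivileges/readPrivileges above):

    PrincipalPrivilegeSet in = new PrincipalPrivilegeSet();
    in.setUserPrivileges(new HashMap<String, List<PrivilegeGrantInfo>>());
    in.setGroupPrivileges(new HashMap<String, List<PrivilegeGrantInfo>>()); // dropped on write
    in.setRolePrivileges(new HashMap<String, List<PrivilegeGrantInfo>>());
    PrincipalPrivilegeSet out = HBaseUtils.readPrivileges(HBaseUtils.writePrivileges(in));
    // user and role privilege maps round-trip; out.getGroupPrivileges() comes back unset,
    // since the HBase metastore does not support the old group-based authorization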