diff --git storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
index 456bf14..ecadf3e 100644
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseAuthorizationProvider.java
@@ -18,126 +18,216 @@
 package org.apache.hcatalog.hbase;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.security.access.AccessControlLists;
+import org.apache.hadoop.hbase.security.access.AccessControllerProtocol;
+import org.apache.hadoop.hbase.security.access.Permission;
+import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.security.access.TablePermission;
+import org.apache.hadoop.hbase.security.access.UserPermission;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hive.hbase.HBaseSerDe;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProviderBase;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hcatalog.common.ErrorType;
+import org.apache.hcatalog.common.HCatException;
 
 /**
  * This class is an implementation of HiveAuthorizationProvider to provide
  * authorization functionality for HBase tables.
  */
-class HBaseAuthorizationProvider implements HiveAuthorizationProvider {
-
-    @Override
-    public Configuration getConf() {
-        return null;
-    }
-
-    @Override
-    public void setConf(Configuration conf) {
+class HBaseAuthorizationProvider extends HiveAuthorizationProviderBase {
+
+    HBaseHCatStorageHandler handler;
+    protected HBaseAdmin admin;
+    protected HTable aclTable;
+
+    public void init(Configuration conf, HBaseHCatStorageHandler handler) throws HiveException,
+            MetaException {
+        super.setConf(handler.getConf());
+        this.handler = handler;
+        this.admin = handler.getHBaseAdmin();
+
+        try {
+            aclTable = new HTable(conf, AccessControlLists.ACL_TABLE_NAME);
+        } catch (IOException ex) {
+            throw new HiveException(ex);
         }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #init(org.apache.hadoop.conf.Configuration)
-     */
-    @Override
-    public void init(Configuration conf) throws HiveException {
+    }
+
+    protected AccessControllerProtocol getAcp(byte[] tableName) {
+        return aclTable.coprocessorProxy(AccessControllerProtocol.class, tableName);
+    }
+
+    protected byte[] bytes(String str) {
+        return Bytes.toBytes(str);
+    }
+
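+    /**
+     * Maps a Hive Privilege to the closest matching HBase Action. Note that
+     * DROP maps to CREATE, since HBase checks for CREATE at drop table.
+     */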
+    protected Action getAction(Privilege priv) {
+        switch (priv.getPriv()) {
+        case ALL : throw new AuthorizationException("no matching Action for Privilege.All");
+        case ALTER_DATA : return Action.WRITE;
+        case ALTER_METADATA : return Action.WRITE;
+        case CREATE : return Action.CREATE;
+        case DROP : return Action.CREATE; //HBase checks for CREATE at drop table
+        case INDEX : return Action.WRITE;
+        case LOCK : return Action.WRITE;
+        case SELECT : return Action.READ;
+        case SHOW_DATABASE : return Action.READ;
+        case UNKNOWN :
+        default : throw new AuthorizationException("Unknown privilege");
         }
+    }
+
+    protected EnumSet<Action> getActions(Privilege[] privs) {
+        EnumSet<Action> actions = EnumSet.noneOf(Action.class);
 
-    @Override
-    public HiveAuthenticationProvider getAuthenticator() {
-        return null;
+        if (privs == null) {
+            return actions;
         }
 
-    @Override
-    public void setAuthenticator(HiveAuthenticationProvider authenticator) {
+        for (Privilege priv : privs) {
+            actions.add(getAction(priv));
         }
+        return actions;
+    }
+
+    protected EnumSet<Action> getActions(Privilege[] inputPrivs, Privilege[] outputPrivs) {
+        EnumSet<Action> actions = getActions(inputPrivs);
+        actions.addAll(getActions(outputPrivs));
+        return actions;
+    }
+
+    @Override
+    public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+            throws HiveException, AuthorizationException {
+        // nothing to authorize
+    }
+
+    @Override
+    public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+            throws HiveException, AuthorizationException {
+        // nothing to authorize. HBase does not have databases
+    }
+
+    @Override
+    public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+            throws HiveException, AuthorizationException {
 
-    /*
-     * (non-Javadoc)
-     *
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
-    @Override
-    public void authorize(Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+        byte[] tableName = bytes(handler.getHBaseTableName(table.getTTable()));
+        try {
+            EnumSet<Action> actions = getActions(readRequiredPriv, writeRequiredPriv);
+            if (actions.isEmpty()) {
+                return; //nothing to do here
+            }
+
+            if (!admin.tableExists(tableName)) {
+                //check for global privs
+                checkAuthorized(actions);
+            } else {
+                //check for table/cf/cq level permissions
+                Permission[] perms = getTablePermissions(table, tableName, actions);
+                checkAuthorized(tableName, perms);
+            }
+        } catch (AccessDeniedException ex) {
+            throw new AuthorizationException(ex);
+        } catch (IOException ex) {
+            throw new HiveException(ex);
+        } catch (SerDeException ex) {
+            throw new HiveException(ex);
         }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.metastore.api.Database,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
-    @Override
-    public void authorize(Database db, Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+    }
+
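+    /**
+     * Builds the set of HBase permissions to check for this table: one
+     * TablePermission per column family/qualifier in the column mapping,
+     * each carrying the requested actions.
+     */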
+    protected Permission[] getTablePermissions(Table table, byte[] tableName, EnumSet<Action> actions)
+            throws SerDeException {
+        String hbaseColumnsMapping = table.getParameters().get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
+        if (hbaseColumnsMapping == null) {
+            hbaseColumnsMapping = table.getParameters().get(HBaseConstants.PROPERTY_COLUMN_MAPPING_KEY);
         }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
-    @Override
-    public void authorize(Table table, Privilege[] readRequiredPriv,
-            Privilege[] writeRequiredPriv) throws HiveException,
-            AuthorizationException {
+        List<String> families = new ArrayList<String>();
+        List<String> qualifiers = new ArrayList<String>();
+        List<byte[]> familiesBytes = new ArrayList<byte[]>();
+        List<byte[]> qualifiersBytes = new ArrayList<byte[]>();
+        int iKey = HBaseSerDe.parseColumnMapping(hbaseColumnsMapping, families,
+                familiesBytes, qualifiers, qualifiersBytes);
+
+        Set<Permission> perms = new HashSet<Permission>();
+        for (int i = 0; i < families.size(); i++) {
+            if (i == iKey) {
+                continue; //the :key mapping does not name an actual hbase column
+            }
+            perms.add(new TablePermission(tableName, familiesBytes.get(i),
+                    qualifiersBytes.get(i), actions.toArray(new Action[actions.size()])));
+        }
+        return perms.toArray(new Permission[perms.size()]);
+    }
+
+    @Override
+    public void authorize(Partition part, Privilege[] readRequiredPriv,
+            Privilege[] writeRequiredPriv) throws HiveException, AuthorizationException {
+        throw new HiveException("HBase storage handler does not use partitions");
+    }
+
+    @Override
+    public void authorize(Table table, Partition part, List<String> columns,
+            Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
+        throw new HiveException("HBase storage handler does not use partitions");
+    }
+
+    public void checkAuthorized(EnumSet<Action> actions) throws IOException {
+        HTable htable = null;
+        try {
+            AccessControllerProtocol acp = getAcp(new byte[0]);
+            Permission perm = new Permission(actions.toArray(new Action[actions.size()]));
+            acp.checkPermissions(new Permission[] {perm});
+        } finally {
+            if (htable != null) {
+                htable.close();
+            }
         }
-
-    /*
-     * (non-Javadoc)
-     *
-     * @see
-     * org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider
-     * #authorize(org.apache.hadoop.hive.ql.metadata.Table,
-     * org.apache.hadoop.hive.ql.metadata.Partition, java.util.List,
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[],
-     * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
-     */
-    @Override
-    public void authorize(Table table, Partition part, List columns,
-            Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
-            throws HiveException, AuthorizationException {
+    }
+
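+    /**
+     * Checks the given table-level permissions for the current user by asking
+     * the AccessController coprocessor of the given HBase table.
+     */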
+    public void checkAuthorized(byte[] table, Permission[] permissions) throws IOException {
+        HTable htable = null;
+        try {
+            htable = new HTable(getConf(), table);
+            AccessControllerProtocol acp
+                = htable.coprocessorProxy(AccessControllerProtocol.class, new byte[0]);
+
+            acp.checkPermissions(permissions);
+        } finally {
+            if (htable != null) {
+                htable.close();
+            }
         }
-
+    }
 }
diff --git storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
index c9905ce..ff67a1e 100644
--- storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
+++ storage-drivers/hbase/src/java/org/apache/hcatalog/hbase/HBaseHCatStorageHandler.java
@@ -82,6 +82,8 @@ public class HBaseHCatStorageHandler extends HCatStorageHandler {
 
     private HBaseAdmin admin;
 
+    protected HBaseAuthorizationProvider hbaseAuth;
+
     /*
      * @return subclass of HCatInputStorageDriver
      *
@@ -115,10 +117,16 @@ public class HBaseHCatStorageHandler extends HCatStorageHandler {
     @Override
     public HiveAuthorizationProvider getAuthorizationProvider()
             throws HiveException {
-
-        HBaseAuthorizationProvider hbaseAuth = new HBaseAuthorizationProvider();
-        hbaseAuth.init(getConf());
-        return hbaseAuth;
+        if (hbaseAuth == null) {
+            try {
+                hbaseAuth = new HBaseAuthorizationProvider();
+                hbaseAuth.init(getConf(), this);
+            } catch (MetaException ex) {
+                throw new HiveException(ex.getMessage());
+            }
+        }
+
+        return hbaseAuth;
     }
 
     /*
@@ -328,7 +336,7 @@ public class HBaseHCatStorageHandler extends HCatStorageHandler {
 
     }
 
-    private HBaseAdmin getHBaseAdmin() throws MetaException {
+    protected HBaseAdmin getHBaseAdmin() throws MetaException {
         try {
             if (admin == null) {
                 admin = new HBaseAdmin(this.getConf());
@@ -341,7 +349,7 @@ public class HBaseHCatStorageHandler extends HCatStorageHandler {
         }
     }
 
-    private String getHBaseTableName(Table tbl) {
+    protected String getHBaseTableName(Table tbl) {
         String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME);
         if (tableName == null) {
             tableName = tbl.getSd().getSerdeInfo().getParameters()
diff --git storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
index 4eb6258..34b6e8b 100644
--- storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
+++ storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java
@@ -21,10 +21,15 @@ package org.apache.hcatalog.hbase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.security.access.SecureTestUtil;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hcatalog.hbase.ManyMiniCluster.Builder;
 
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -50,7 +55,7 @@ public abstract class SkeletonHBaseTest {
 
     protected void createTable(String tableName, String[] families) {
         try {
-            HBaseAdmin admin = new HBaseAdmin(getHbaseConf());
+            HBaseAdmin admin = getHBaseAdmin();
             HTableDescriptor tableDesc = new HTableDescriptor(tableName);
             for(String family: families) {
                 HColumnDescriptor columnDescriptor = new HColumnDescriptor(family);
@@ -64,6 +69,10 @@ public abstract class SkeletonHBaseTest {
 
     }
 
+    protected HBaseAdmin getHBaseAdmin() throws MasterNotRunningException, ZooKeeperConnectionException {
+        return new HBaseAdmin(getHbaseConf());
+    }
+
     protected String newTableName(String prefix) {
         String name =null;
         int tries = 100;
@@ -87,6 +96,13 @@ public abstract class SkeletonHBaseTest {
         contextMap.get(getContextHandle()).start();
     }
 
+    public static void setup(Context context) {
+        if(!contextMap.containsKey(getContextHandle()))
+            contextMap.put(getContextHandle(), context);
+
+        contextMap.get(getContextHandle()).start();
+    }
+
     /**
      * shutdown an hbase cluster instance ant the end of the test suite
      */
@@ -173,7 +189,7 @@ public abstract class SkeletonHBaseTest {
         public void start() {
             if(usageCount++ == 0) {
-                cluster = ManyMiniCluster.create(new File(testDir)).build();
+                cluster = createBuilder().build();
                 cluster.start();
                 hbaseConf = cluster.getHBaseConf();
                 jobConf = cluster.getJobConf();
@@ -182,6 +198,10 @@ public abstract class SkeletonHBaseTest {
             }
         }
 
+        public Builder createBuilder() {
+            return ManyMiniCluster.create(new File(testDir));
+        }
+
         public void stop() {
             if( --usageCount == 0) {
                 try {
@@ -223,4 +243,22 @@ public abstract class SkeletonHBaseTest {
         }
     }
 
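+    /**
+     * A test Context that starts the mini cluster with HBase security
+     * (the AccessController coprocessor) enabled on its configuration.
+     */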
+    public static class SecureContext extends Context {
+        public SecureContext(String handle) {
+            super(handle);
+        }
+
+        public Builder createBuilder() {
+            try {
+                Builder builder = super.createBuilder();
+                Configuration hbaseConf = HBaseConfiguration.create();
+                SecureTestUtil.enableSecurity(hbaseConf);
+                builder.hbaseConf(hbaseConf);
+                return builder;
+            } catch (IOException ex) {
+                throw new RuntimeException(ex);
+            }
+        }
+    }
+
 }
diff --git storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseAuthorizationProvider.java storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseAuthorizationProvider.java
new file mode 100644
index 0000000..94ed5ad
--- /dev/null
+++ storage-drivers/hbase/src/test/org/apache/hcatalog/hbase/TestHBaseAuthorizationProvider.java
@@ -0,0 +1,412 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.hbase;
+
+import static org.apache.hcatalog.HcatTestUtils.perm500;
+import static org.apache.hcatalog.HcatTestUtils.perm700;
+import static org.apache.hcatalog.HcatTestUtils.perm755;
+import static org.apache.hcatalog.HcatTestUtils.perm777;
+
+import java.io.IOException;
+import java.net.URI;
+import java.security.PrivilegedExceptionAction;
+import java.util.Map;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.security.access.AccessControlLists;
+import org.apache.hadoop.hbase.security.access.AccessControllerProtocol;
+import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.security.access.TablePermission;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hcatalog.HcatTestUtils;
+import org.apache.hcatalog.cli.HCatDriver;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.security.StorageDelegationAuthorizationProvider;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestHBaseAuthorizationProvider extends SkeletonHBaseTest {
+
+    protected static HiveConf conf;
+    protected static HCatDriver hcatDriver;
+    protected static Warehouse wh;
+    protected Path whPath;
+    protected FileSystem whFs;
+    protected Hive hive;
+    protected HBaseAdmin admin;
+    protected User adminUser, readerUser, limitedUser, deniedUser;
+
+    @BeforeClass
+    public static void setup() {
+        SkeletonHBaseTest.setup(new SecureContext(SkeletonHBaseTest.DEFAULT_CONTEXT_HANDLE));
+    }
+
+    @Before
+    public void setUp() throws Exception {
+        conf = getHiveConf();
+        conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        URI fsuri = getFileSystem().getUri();
+        whPath = new Path(fsuri.getScheme(), fsuri.getAuthority(), getTestDir());
+        conf.set(HiveConf.ConfVars.HADOOPFS.varname, fsuri.toString());
+        conf.set(ConfVars.METASTOREWAREHOUSE.varname, whPath.toString());
+        conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
+        conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
+                StorageDelegationAuthorizationProvider.class.getCanonicalName());
+
+        // Add hbase properties
+
+        for (Map.Entry<String, String> el : getHbaseConf()) {
+            if (el.getKey().startsWith("hbase.")) {
+                conf.set(el.getKey(), el.getValue());
+            }
+        }
+
+        whFs = whPath.getFileSystem(conf);
+        hive = Hive.get(conf);
+        wh = new Warehouse(conf);
+
+        HcatTestUtils.cleanupHMS(hive, wh, perm700);
+
+        whFs.setPermission(whPath, perm755);
+
+        SessionState.start(new CliSessionState(conf));
+        hcatDriver = new HCatDriver();
+        admin = getHBaseAdmin();
+
+        //create test users
+        //the current user and the admin user have global privileges for all actions
+        adminUser = User.createUserForTesting(conf, "admin", new String[]{"supergroup"});
+        readerUser = User.createUserForTesting(conf, "reader", new String[0]);
+        limitedUser = User.createUserForTesting(conf, "limited", new String[0]);
+        deniedUser = User.createUserForTesting(conf, "denied", new String[0]);
+    }
+
+    public Path getDbPath(String dbName) throws MetaException, HiveException {
+        return wh.getDatabasePath(hive.getDatabase(dbName));
+    }
+
+    public void createTable(String tableName) throws Exception {
+        createTable(tableName, tableName);
+    }
+
+    public void createTable(User user, String tableName) throws Exception {
+        createTable(user, tableName, tableName);
+    }
+
+    public void createTable(String tableName, String hbaseTableName) throws Exception {
+        createTable(User.getCurrent(), tableName, hbaseTableName);
+    }
+
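+    /** Creates an HCatalog-managed HBase table through the storage handler, as the given user. */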
+    public void createTable(User user, String tableName, String hbaseTableName) throws Exception {
+        exec(user, "CREATE TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", tableName, hbaseTableName);
+    }
+
+    public void createTableFail(String tableName) throws Exception {
+        createTableFail(User.getCurrent(), tableName);
+    }
+
+    public void createTableFail(User user, String tableName) throws Exception {
+        execFail(user, "CREATE TABLE %s(key int, value string) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val')", tableName);
+    }
+
+    public void createHBaseTable(String tableName) throws IOException {
+        createHBaseTable(tableName, "cf");
+    }
+
+    public void createHBaseTable(String tableName, String... families) throws IOException {
+        HTableDescriptor desc = new HTableDescriptor(tableName);
+        for (String family : families) {
+            desc.addFamily(new HColumnDescriptor(Bytes.toBytes(family)));
+        }
+        admin.createTable(desc);
+    }
+
+    public void dropHBaseTable(String tableName) throws IOException {
+        admin.disableTable(tableName);
+        admin.deleteTable(tableName);
+    }
+
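+    /** Grants HBase permissions to a user at table, family or qualifier level via the ACL table. */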
+    public void grant(String tableName, String username, Action... actions) throws IOException {
+        grant(tableName, null, username, actions);
+    }
+
+    public void grant(String tableName, String family, String username, Action... actions)
+            throws IOException {
+        grant(tableName, family, null, username, actions);
+    }
+
+    public void grant(String tableName, String family, String qualifier, String username,
+            Action... actions) throws IOException {
+        HTable meta = new HTable(getHbaseConf(), AccessControlLists.ACL_TABLE_NAME);
+        byte[] table = Bytes.toBytes(tableName);
+        AccessControllerProtocol protocol = meta.coprocessorProxy(AccessControllerProtocol.class
+                , table);
+
+        byte[] f = family == null ? null : Bytes.toBytes(family);
+        byte[] q = qualifier == null ? null : Bytes.toBytes(qualifier);
+
+        protocol.grant(Bytes.toBytes(username), new TablePermission(table, f, q, actions));
+    }
+
+    public void revoke(String tableName, String username, Action action) throws IOException {
+        HTable meta = new HTable(getHbaseConf(), AccessControlLists.ACL_TABLE_NAME);
+        byte[] table = Bytes.toBytes(tableName);
+        AccessControllerProtocol protocol = meta.coprocessorProxy(AccessControllerProtocol.class
+                , table);
+
+        protocol.revoke(Bytes.toBytes(username), new TablePermission(table, null, action));
+    }
+
+    @Test
+    public void testTableOps() throws Exception {
+        // default db
+        createTable("foo1");
+        exec("DESCRIBE foo1");
+        exec("DROP TABLE foo1");
+
+        //non default db
+        exec("CREATE DATABASE doo");
+        exec("USE doo");
+        createTable("foo4");
+        exec("DESCRIBE foo4");
+        exec("DROP TABLE foo4");
+
+        createTable("foo1");
+        exec("DESCRIBE EXTENDED foo1");
+        exec("DESCRIBE FORMATTED foo1");
+        exec("DESCRIBE foo1.foo");
+        exec("DROP TABLE foo1");
+
+        //admin user drops the table
+        whFs.setPermission(whPath, perm777);
+        exec("USE default");
+        createTable("foo1");
+        exec(adminUser, "DESCRIBE foo1");
+        exec(adminUser, "DROP TABLE foo1");
+
+        //grant rights to the limited user, who then reads from the table
+        createTable(adminUser, "foo1");
+        grant("foo1", limitedUser.getShortName(), Action.READ);
+        exec(limitedUser, "DESCRIBE foo1");
+        exec("DROP TABLE foo1");
+    }
+
+    @Test
+    public void testCreateTableFail1() throws Exception {
+        // default db
+        whFs.mkdirs(whPath, perm500); // revoke write at the hdfs db location
+        createTableFail("foo1");
+    }
+
+    @Test
+    public void testCreateTableFail2() throws Exception {
+        // test with no hbase global write permission for the user
+        whFs.setPermission(whPath, perm777); //set the permissions for the db path so that hdfs will authorize
+        createTableFail(readerUser, "foo1");
+    }
+
+    @Test
+    public void testCreateTableFail3() throws Exception {
+        // non default db
+        exec("CREATE DATABASE doo");
+        whFs.setPermission(getDbPath("doo"), perm500);
+        exec("use doo");
+        createTableFail("foo4");
+    }
+
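+    /**
+     * Creating an external table should succeed when the user owns the HBase
+     * table or holds table-wide, family-level or qualifier-level grants that
+     * cover the column mapping.
+     */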
+    @Test
+    public void testExternalTableOps() throws Exception {
+        whFs.setPermission(whPath, perm777);
+        //1. Test create an external table as table owner
+        //create a table in hbase first
+        createHBaseTable("foo1", "cf1", "cf2", "cf3");
+
+        exec("CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo1", "foo1");
+        exec("DESCRIBE foo1");
+
+        //2. Test create an external table having table grants
+        createHBaseTable("foo2", "cf1", "cf2", "cf3");
+        //grant table permissions to limitedUser
+        grant("foo2", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        exec(limitedUser, "CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo2", "foo2");
+        exec(limitedUser, "DESCRIBE foo2");
+
+        //3. Test create an external table having only family grants
+        createHBaseTable("foo3", "cf1", "cf2", "cf3");
+        //grant family permissions to limitedUser
+        grant("foo3", "cf1", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        grant("foo3", "cf2", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        exec(limitedUser, "CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo3", "foo3");
+        exec(limitedUser, "DESCRIBE foo3");
+
+        //4. Test create an external table having only column grants
+        createHBaseTable("foo4", "cf1", "cf2", "cf3");
+        //grant qualifier permissions to limitedUser
+        grant("foo4", "cf1", "val", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        grant("foo4", "cf2", "val", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        exec(limitedUser, "CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo4", "foo4");
+        exec(limitedUser, "DESCRIBE foo4");
+
+        dropHBaseTable("foo1");
+        dropHBaseTable("foo2");
+        dropHBaseTable("foo3");
+        dropHBaseTable("foo4");
+    }
+
+    @Test
+    public void testCreateExternalTableFail1() throws Exception {
+        whFs.setPermission(whPath, perm777);
+        //1. Test create an external table as a user not having any rights
+        createHBaseTable("foo5", "cf1", "cf2", "cf3");
+
+        execFail(limitedUser, "CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo5", "foo5");
+
+        //2. Test create an external table having partial rights grants
+        createHBaseTable("foo6", "cf1", "cf2", "cf3");
+        //grant family permissions to limitedUser
+        grant("foo6", "cf1", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        //no rights on cf2
+        execFail(limitedUser, "CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo6", "foo6");
+
+        //3. Test create an external table having partial column grants
+        createHBaseTable("foo7", "cf1", "cf2", "cf3");
+        //grant qualifier permissions to limitedUser
+        grant("foo7", "cf1", "val", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        //grant qualifier rights to a different qualifier
+        grant("foo7", "cf2", "val_not_used", limitedUser.getShortName(), Action.CREATE, Action.READ);
+        execFail(limitedUser, "CREATE EXTERNAL TABLE %s(key int, value string, foo int) STORED BY " +
+                "'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' " +
+                "TBLPROPERTIES ('hbase.columns.mapping'=':key,cf1:val,cf2:val', " +
+                "'hbase.table.name'='%s')", "foo7", "foo7");
+        dropHBaseTable("foo5");
+        dropHBaseTable("foo6");
+        dropHBaseTable("foo7");
+    }
+
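+    /** DROP TABLE should fail for a user without the required permission on the HBase table. */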
+    @Test
+    public void testDropTableFail1() throws Exception {
+        //default db
+        createTable("foo42");
+
+        execFail(readerUser, "DROP TABLE foo42");
+    }
+
+    @Test
+    public void testDropTableFail2() throws Exception {
+        //create a table named foo, but name it foo_hbase_table in HBase
+        createTable("foo", "foo_hbase_table");
+
+        //create an actual hbase table named foo
+        createHBaseTable("foo");
+
+        //grant CREATE on the HBase table foo, but not on foo_hbase_table
+        grant("foo", limitedUser.getShortName(), Action.CREATE);
+
+        //should not be able to DROP
+        execFail(limitedUser, "DROP TABLE foo");
+    }
+
+    @Test
+    public void testDescTableFail() throws Exception {
+        createTable("foo_desc_table");
+
+        execFail(readerUser, "DESCRIBE foo_desc_table");
+    }
+
+    //No ALTER TABLE in non-native tables
+
+    /** Execute the query expecting success */
+    public void exec(String format, Object... args) throws Exception {
+        String command = String.format(format, args);
+        CommandProcessorResponse resp = hcatDriver.run(command);
+        Assert.assertEquals(resp.getErrorMessage(), 0, resp.getResponseCode());
+        Assert.assertEquals(resp.getErrorMessage(), null, resp.getErrorMessage());
+    }
+
+    public void exec(User user, final String format, final Object... args) throws Exception {
+        user.runAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws Exception {
+                exec(format, args);
+                return null;
+            }
+        });
+    }
+
+    /** Execute the query expecting it to fail with AuthorizationException */
+    public void execFail(String format, Object... args) throws Exception {
+        String command = String.format(format, args);
+        CommandProcessorResponse resp = hcatDriver.run(command);
+        Assert.assertNotSame(resp.getErrorMessage(), 0, resp.getResponseCode());
+        Assert.assertTrue(resp.getResponseCode() == 403 || //hive checks fail with 403
+                resp.getErrorMessage().contains("org.apache.hadoop.hive.ql.metadata.AuthorizationException"));
+    }
+
+    public void execFail(User user, final String format, final Object... args) throws Exception {
+        user.runAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws Exception {
+                execFail(format, args);
+                return null;
+            }
+        });
+    }
+
+}