Index: conf/hive-default.xml.template =================================================================== --- conf/hive-default.xml.template (revision 1419361) +++ conf/hive-default.xml.template (working copy) @@ -1264,6 +1264,14 @@ + hive.security.metastore.authorization.manager + org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider + authorization manager class name to be used in the metastore for authorization. + The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider. + + + + hive.security.authenticator.manager org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator hive client authenticator manager class name. @@ -1271,6 +1279,13 @@ + hive.security.metastore.authenticator.manager + org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator + authenticator manager class name to be used in the metastore for authentication. + The user defined authenticator should implement interface org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider. + + + hive.security.authorization.createtable.user.grants the privileges automatically granted to some users whenever a table gets created. Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (revision 1419361) +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (working copy) @@ -40,12 +40,12 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; @@ -67,6 +67,8 @@ public static final String DEFAULT_DATABASE_NAME = "default"; public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database"; + public static final String DATABASE_WAREHOUSE_SUFFIX = ".db"; + /** * printStackTrace * @@ -1062,4 +1064,5 @@ throw new RuntimeException("Unable to instantiate " + theClass.getName(), e); } } + } Index: metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (revision 1419361) +++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (working copy) @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore; +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DATABASE_WAREHOUSE_SUFFIX; import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; import java.io.FileNotFoundException; @@ -164,6 +165,14 @@ return new Path(db.getLocationUri()); } + public Path getDefaultDatabasePath(String dbName) throws MetaException { + if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) { + return getWhRoot(); + } + return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX); 
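+    // i.e. with a warehouse root of /user/hive/warehouse, a database named
+    // "Sales" resolves to /user/hive/warehouse/sales.db, while the default
+    // database maps onto the warehouse root itself.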
+ } + + public Path getTablePath(Database db, String tableName) throws MetaException { return getDnsPath(new Path(getDatabasePath(db), tableName.toLowerCase())); Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1419361) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -59,7 +59,6 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException; -import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.EnvironmentContext; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -87,6 +86,7 @@ import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.hadoop.hive.metastore.api.UnknownPartitionException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; +import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent; import org.apache.hadoop.hive.metastore.events.AlterTableEvent; @@ -365,6 +365,10 @@ return conf; } + public Warehouse getWh() { + return wh; + } + /** * Get a cached RawStore. * @@ -397,7 +401,7 @@ } catch (NoSuchObjectException e) { ms.createDatabase( new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT, - getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null)); + wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null)); } HMSHandler.createDefaultDB = true; } @@ -515,22 +519,13 @@ return counters; } - private static final String DATABASE_WAREHOUSE_SUFFIX = ".db"; - - private Path getDefaultDatabasePath(String dbName) throws MetaException { - if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) { - return wh.getWhRoot(); - } - return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX); - } - private void create_database_core(RawStore ms, final Database db) throws AlreadyExistsException, InvalidObjectException, MetaException { if (!validateName(db.getName())) { throw new InvalidObjectException(db.getName() + " is not a valid database name"); } if (null == db.getLocationUri()) { - db.setLocationUri(getDefaultDatabasePath(db.getName()).toString()); + db.setLocationUri(wh.getDefaultDatabasePath(db.getName()).toString()); } else { db.setLocationUri(wh.getDnsPath(new Path(db.getLocationUri())).toString()); } Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1419361) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -610,6 +610,11 @@ "org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider"), HIVE_AUTHENTICATOR_MANAGER("hive.security.authenticator.manager", "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator"), + HIVE_METASTORE_AUTHORIZATION_MANAGER("hive.security.metastore.authorization.manager", + "org.apache.hadoop.hive.ql.security.authorization." 
+ + "DefaultHiveMetastoreAuthorizationProvider"), + HIVE_METASTORE_AUTHENTICATOR_MANAGER("hive.security.metastore.authenticator.manager", + "org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator"), HIVE_AUTHORIZATION_TABLE_USER_GRANTS("hive.security.authorization.createtable.user.grants", ""), HIVE_AUTHORIZATION_TABLE_GROUP_GRANTS("hive.security.authorization.createtable.group.grants", ""), Index: ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java (revision 0) +++ ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java (revision 0) @@ -0,0 +1,303 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security; + +import java.io.IOException; +import java.net.ServerSocket; +import java.util.ArrayList; +import java.util.List; + +import junit.framework.TestCase; + +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.security.DummyHiveMetastoreAuthorizationProvider.AuthCallContext; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.shims.ShimLoader; + +/** + * TestAuthorizationPreEventListener. 
Test case for + * {@link org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener} and + * {@link org.apache.hadoop.hive.metastore.MetaStorePreEventListener} + */ +public class TestAuthorizationPreEventListener extends TestCase { + private HiveConf clientHiveConf; + private HiveMetaStoreClient msc; + private Driver driver; + + @Override + protected void setUp() throws Exception { + + super.setUp(); + + int port = MetaStoreUtils.findFreePort(); + + System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, + AuthorizationPreEventListener.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname, + DummyHiveMetastoreAuthorizationProvider.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname, + HadoopDefaultMetastoreAuthenticator.class.getName()); + + MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge()); + + clientHiveConf = new HiveConf(this.getClass()); + + clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); + clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + + clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); + clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + + + SessionState.start(new CliSessionState(clientHiveConf)); + msc = new HiveMetaStoreClient(clientHiveConf, null); + driver = new Driver(clientHiveConf); + } + + // Note: currently unused; setUp() obtains its port via MetaStoreUtils.findFreePort(). + private static String getFreeAvailablePort() throws IOException { + ServerSocket socket = new ServerSocket(0); + socket.setReuseAddress(true); + int port = socket.getLocalPort(); + socket.close(); + return "" + port; + } + + @Override + protected void tearDown() throws Exception { + super.tearDown(); + } + + private void validateCreateDb(Database expectedDb, Database actualDb) { + assertEquals(expectedDb.getName(), actualDb.getName()); + assertEquals(expectedDb.getLocationUri(), actualDb.getLocationUri()); + } + + private void validateTable(Table expectedTable, Table actualTable) { + assertEquals(expectedTable.getTableName(), actualTable.getTableName()); + assertEquals(expectedTable.getDbName(), actualTable.getDbName()); + + // We won't try to be too strict in checking this because we're comparing + // table create intents with observed tables created. + // If it does have a location though, we will compare, as with external tables + if ((actualTable.getSd() != null) && (actualTable.getSd().getLocation() != null)){ + assertEquals(expectedTable.getSd().getLocation(), actualTable.getSd().getLocation()); + } + } + + private void validateCreateTable(Table expectedTable, Table actualTable) { + validateTable(expectedTable, actualTable); + } + + private void validateAddPartition(Partition expectedPartition, Partition actualPartition) { + validatePartition(expectedPartition,actualPartition); + } + + private void validatePartition(Partition expectedPartition, Partition actualPartition) { + assertEquals(expectedPartition.getValues(), + actualPartition.getValues()); + assertEquals(expectedPartition.getDbName(), + actualPartition.getDbName()); + assertEquals(expectedPartition.getTableName(), + actualPartition.getTableName()); + + // assertEquals(expectedPartition.getSd().getLocation(), + // actualPartition.getSd().getLocation()); + // we don't compare locations, because the location can still be empty in + // the pre-event listener before it is created.
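+    // The remaining storage-descriptor fields are already populated by the
+    // time the pre-event fires, so they are safe to compare directly.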
+ + assertEquals(expectedPartition.getSd().getInputFormat(), + actualPartition.getSd().getInputFormat()); + assertEquals(expectedPartition.getSd().getOutputFormat(), + actualPartition.getSd().getOutputFormat()); + assertEquals(expectedPartition.getSd().getSerdeInfo(), + actualPartition.getSd().getSerdeInfo()); + + } + + private void validateAlterPartition(Partition expectedOldPartition, + Partition expectedNewPartition, String actualOldPartitionDbName, + String actualOldPartitionTblName,List actualOldPartitionValues, + Partition actualNewPartition) { + assertEquals(expectedOldPartition.getValues(), actualOldPartitionValues); + assertEquals(expectedOldPartition.getDbName(), actualOldPartitionDbName); + assertEquals(expectedOldPartition.getTableName(), actualOldPartitionTblName); + + validatePartition(expectedNewPartition, actualNewPartition); + } + + private void validateAlterTable(Table expectedOldTable, Table expectedNewTable, + Table actualOldTable, Table actualNewTable) { + validateTable(expectedOldTable, actualOldTable); + validateTable(expectedNewTable, actualNewTable); + } + + private void validateDropPartition(Partition expectedPartition, Partition actualPartition) { + validatePartition(expectedPartition, actualPartition); + } + + private void validateDropTable(Table expectedTable, Table actualTable) { + validateTable(expectedTable, actualTable); + } + + private void validateDropDb(Database expectedDb, Database actualDb) { + assertEquals(expectedDb, actualDb); + } + + public void testListener() throws Exception { + String dbName = "tmpdb"; + String tblName = "tmptbl"; + String renamed = "tmptbl2"; + int listSize = 0; + + List authCalls = DummyHiveMetastoreAuthorizationProvider.authCalls; + assertEquals(authCalls.size(),listSize); + + driver.run("create database " + dbName); + listSize++; + Database db = msc.getDatabase(dbName); + + Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB); + validateCreateDb(db,dbFromEvent); + + driver.run("use " + dbName); + driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName)); + listSize++; + Table tbl = msc.getTable(dbName, tblName); + + Table tblFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Table) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE)) + .getTTable(); + validateCreateTable(tbl, tblFromEvent); + + driver.run("alter table tmptbl add partition (b='2011')"); + listSize++; + Partition part = msc.getPartition("tmpdb", "tmptbl", "b=2011"); + + Partition ptnFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + validateAddPartition(part,ptnFromEvent); + + driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'")); + listSize++; + + //the partition did not change, + // so the new partition should be similar to the original partition + Partition modifiedP = msc.getPartition(dbName, tblName, "b=2011"); + + Partition ptnFromEventAfterAlter = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + + validateAlterPartition(part, modifiedP, ptnFromEventAfterAlter.getDbName(), + 
ptnFromEventAfterAlter.getTableName(), ptnFromEventAfterAlter.getValues(), + ptnFromEventAfterAlter); + + + List part_vals = new ArrayList(); + part_vals.add("c=2012"); + Partition newPart = msc.appendPartition(dbName, tblName, part_vals); + + listSize++; + + Partition newPtnFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + validateAddPartition(newPart,newPtnFromEvent); + + + driver.run(String.format("alter table %s rename to %s", tblName, renamed)); + listSize++; + + Table renamedTable = msc.getTable(dbName, renamed); + Table renamedTableFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Table) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE)) + .getTTable(); + + validateAlterTable(tbl, renamedTable, renamedTableFromEvent, + renamedTable); + assertFalse(tbl.getTableName().equals(renamedTable.getTableName())); + + + //change the table name back + driver.run(String.format("alter table %s rename to %s", renamed, tblName)); + listSize++; + + driver.run(String.format("alter table %s drop partition (b='2011')", tblName)); + listSize++; + + Partition ptnFromDropPartition = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + + validateDropPartition(modifiedP, ptnFromDropPartition); + + driver.run("drop table " + tblName); + listSize++; + Table tableFromDropTableEvent = ( + (org.apache.hadoop.hive.ql.metadata.Table) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE)) + .getTTable(); + + + validateDropTable(tbl, tableFromDropTableEvent); + + driver.run("drop database " + dbName); + listSize++; + Database dbFromDropDatabaseEvent = + (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB); + + validateDropDb(db, dbFromDropDatabaseEvent); + } + + public Object assertAndExtractSingleObjectFromEvent(int listSize, + List authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType callType) { + assertEquals(listSize, authCalls.size()); + assertEquals(1,authCalls.get(listSize-1).authObjects.size()); + + assertEquals(callType,authCalls.get(listSize-1).type); + return (authCalls.get(listSize-1).authObjects.get(0)); + } + +} Index: ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java (revision 0) +++ ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java (revision 0) @@ -0,0 +1,226 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import junit.framework.TestCase; + +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; +import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.serde.serdeConstants; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; + +/** + * TestDefaultHiveMetaStoreAuthorizationProvider. Test case for + * DefaultHiveMetastoreAuthorizationProvider + * using {@link org.apache.hadoop.hive.metastore.AuthorizationPreEventListener} + * + * Note that while we do use the hive driver to test, that is mostly for test + * writing ease, and it has the same effect as using a metastore client directly + * because we disable hive client-side authorization for this test, and only + * turn on server-side auth. 
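+ *
+ * The server side is wired up in setUp() through system properties: the
+ * metastore pre-event listener is AuthorizationPreEventListener, the
+ * authorization manager is DefaultHiveMetastoreAuthorizationProvider, and the
+ * authenticator is InjectableDummyAuthenticator, which lets the test
+ * impersonate an unprivileged user mid-run.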
+ */ +public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase { + private HiveConf clientHiveConf; + private HiveMetaStoreClient msc; + private Driver driver; + private UserGroupInformation ugi; + + @Override + protected void setUp() throws Exception { + + super.setUp(); + + int port = MetaStoreUtils.findFreePort(); + + System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, + AuthorizationPreEventListener.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname, + DefaultHiveMetastoreAuthorizationProvider.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname, + InjectableDummyAuthenticator.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS.varname, ""); + + + MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge()); + + clientHiveConf = new HiveConf(this.getClass()); + + clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,false); + + clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port); + clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + + clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); + clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + + ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf); + + SessionState.start(new CliSessionState(clientHiveConf)); + msc = new HiveMetaStoreClient(clientHiveConf, null); + driver = new Driver(clientHiveConf); + } + + @Override + protected void tearDown() throws Exception { + super.tearDown(); + } + + private void validateCreateDb(Database expectedDb, String dbName) { + assertEquals(expectedDb.getName(), dbName); + } + + private void validateCreateTable(Table expectedTable, String tblName, String dbName) { + assertNotNull(expectedTable); + assertEquals(expectedTable.getTableName(),tblName); + assertEquals(expectedTable.getDbName(),dbName); + } + + public void testSimplePrivileges() throws Exception { + String dbName = "smpdb"; + String tblName = "smptbl"; + + String userName = ugi.getUserName(); + + CommandProcessorResponse ret = driver.run("create database " + dbName); + assertEquals(0,ret.getResponseCode()); + Database db = msc.getDatabase(dbName); + + validateCreateDb(db,dbName); + + driver.run("use " + dbName); + ret = driver.run( + String.format("create table %s (a string) partitioned by (b string)", tblName)); + + assertEquals(1,ret.getResponseCode()); + // failure from not having permissions to create table + + ArrayList fields = new ArrayList(2); + fields.add(new FieldSchema("a", serdeConstants.STRING_TYPE_NAME, "")); + + Table ttbl = new Table(); + ttbl.setDbName(dbName); + ttbl.setTableName(tblName); + StorageDescriptor sd = new StorageDescriptor(); + ttbl.setSd(sd); + sd.setCols(fields); + sd.setParameters(new HashMap()); + sd.getParameters().put("test_param_1", "Use this for comments etc"); + sd.setSerdeInfo(new SerDeInfo()); + sd.getSerdeInfo().setName(ttbl.getTableName()); + sd.getSerdeInfo().setParameters(new HashMap()); + sd.getSerdeInfo().getParameters().put( + org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1"); + sd.getSerdeInfo().setSerializationLib( + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName()); + ttbl.setPartitionKeys(new ArrayList()); + + MetaException me = null; + try { + 
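+      // The Driver-based create failed above; a direct metastore-client call
+      // must be rejected the same way, since the user still lacks the CREATE
+      // privilege on the database.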
msc.createTable(ttbl); + } catch (MetaException e){ + me = e; + } + assertNotNull(me); + assertTrue(me.getMessage().indexOf("No privilege") != -1); + + driver.run("grant create on database "+dbName+" to user "+userName); + + driver.run("use " + dbName); + ret = driver.run( + String.format("create table %s (a string) partitioned by (b string)", tblName)); + + assertEquals(0,ret.getResponseCode()); // now it succeeds. + Table tbl = msc.getTable(dbName, tblName); + + validateCreateTable(tbl,tblName, dbName); + + String fakeUser = "mal"; + List fakeGroupNames = new ArrayList(); + fakeGroupNames.add("groupygroup"); + + InjectableDummyAuthenticator.injectUserName(fakeUser); + InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames); + InjectableDummyAuthenticator.injectMode(true); + + ret = driver.run( + String.format("create table %s (a string) partitioned by (b string)", tblName+"mal")); + + assertEquals(1,ret.getResponseCode()); + + ttbl.setTableName(tblName+"mal"); + me = null; + try { + msc.createTable(ttbl); + } catch (MetaException e){ + me = e; + } + assertNotNull(me); + assertTrue(me.getMessage().indexOf("No privilege") != -1); + + ret = driver.run("alter table "+tblName+" add partition (b='2011')"); + assertEquals(1,ret.getResponseCode()); + + List ptnVals = new ArrayList(); + ptnVals.add("b=2011"); + Partition tpart = new Partition(); + tpart.setDbName(dbName); + tpart.setTableName(tblName); + tpart.setValues(ptnVals); + tpart.setParameters(new HashMap()); + tpart.setSd(tbl.getSd().deepCopy()); + tpart.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo().deepCopy()); + tpart.getSd().setLocation(tbl.getSd().getLocation() + "/tpart"); + + me = null; + try { + msc.add_partition(tpart); + } catch (MetaException e){ + me = e; + } + assertNotNull(me); + assertTrue(me.getMessage().indexOf("No privilege") != -1); + + InjectableDummyAuthenticator.injectMode(false); + + ret = driver.run("alter table "+tblName+" add partition (b='2011')"); + assertEquals(0,ret.getResponseCode()); + + } + +} Index: ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java (revision 0) +++ ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java (revision 0) @@ -0,0 +1,204 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider; +import org.apache.hadoop.hive.ql.security.authorization.Privilege; + +public class DummyHiveMetastoreAuthorizationProvider implements HiveMetastoreAuthorizationProvider { + + + protected HiveAuthenticationProvider authenticator; + + public enum AuthCallContextType { + USER, + DB, + TABLE, + PARTITION, + TABLE_AND_PARTITION + }; + + class AuthCallContext { + + public AuthCallContextType type; + public List authObjects; + public Privilege[] readRequiredPriv; + public Privilege[] writeRequiredPriv; + + AuthCallContext(AuthCallContextType typeOfCall, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) { + this.type = typeOfCall; + this.authObjects = new ArrayList(); + this.readRequiredPriv = readRequiredPriv; + this.writeRequiredPriv = writeRequiredPriv; + } + AuthCallContext(AuthCallContextType typeOfCall, Object authObject, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) { + this(typeOfCall,readRequiredPriv,writeRequiredPriv); + this.authObjects.add(authObject); + } + AuthCallContext(AuthCallContextType typeOfCall, List authObjects, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) { + this(typeOfCall,readRequiredPriv,writeRequiredPriv); + this.authObjects.addAll(authObjects); + } + } + + public static final List authCalls = new ArrayList(); + + private Configuration conf; + public static final Log LOG = LogFactory.getLog( + DummyHiveMetastoreAuthorizationProvider.class);; + + @Override + public Configuration getConf() { + return this.conf; + } + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + try { + init(conf); + } catch (HiveException e) { + throw new RuntimeException(e); + } + } + + @Override + public HiveAuthenticationProvider getAuthenticator() { + return authenticator; + } + + @Override + public void setAuthenticator(HiveAuthenticationProvider authenticator) { + this.authenticator = authenticator; + } + + @Override + public void init(Configuration conf) throws HiveException { + debugLog("DHMAP.init"); + } + + @Override + public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorize " + + "read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.USER, + readRequiredPriv, writeRequiredPriv)); + } + + @Override + public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorizedb " + + "db:" + db.getName() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.DB, + db, readRequiredPriv, writeRequiredPriv)); + } + + @Override + public void 
authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorizetbl " + + "tbl:" + table.getCompleteName() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.TABLE, + table, readRequiredPriv, writeRequiredPriv)); + + } + + @Override + public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorizepart " + + "tbl:" + part.getTable().getCompleteName() + + " , part: " + part.getName() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.PARTITION, + part, readRequiredPriv, writeRequiredPriv)); + + } + + @Override + public void authorize(Table table, Partition part, List columns, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, + AuthorizationException { + debugLog("DHMAP.authorizecols " + + "tbl:" + table.getCompleteName() + + " , part: " + part.getName() + + " . cols: " + columns.toString() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + List authObjects = new ArrayList(); + authObjects.add(table); + authObjects.add(part); + authCalls.add(new AuthCallContext(AuthCallContextType.TABLE_AND_PARTITION, + authObjects, readRequiredPriv, writeRequiredPriv)); + + } + + private void debugLog(String s) { + LOG.debug(s); + } + + private String debugPrivPrint(Privilege[] privileges) { + StringBuffer sb = new StringBuffer(); + sb.append("Privileges{"); + if (privileges != null){ + for (Privilege p : privileges){ + sb.append(p.toString()); + } + }else{ + sb.append("null"); + } + sb.append("}"); + return sb.toString(); + } + + @Override + public void setMetaStoreHandler(HMSHandler handler) { + debugLog("DHMAP.setMetaStoreHandler"); + } + + +} Index: ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java =================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java (revision 0) +++ ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java (revision 0) @@ -0,0 +1,105 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * + * InjectableDummyAuthenticator - An implementation of HiveMetastoreAuthenticationProvider + * that wraps another Authenticator, but when asked to inject a user provided username + * and groupnames, does so. This can be toggled back and forth to use in testing + */ +public class InjectableDummyAuthenticator implements HiveMetastoreAuthenticationProvider { + + private static String userName; + private static List groupNames; + private static boolean injectMode; + private static Class hmapClass = + HadoopDefaultMetastoreAuthenticator.class; + private HiveMetastoreAuthenticationProvider hmap; + + public static void injectHmapClass(Class clazz){ + hmapClass = clazz; + } + + public static void injectUserName(String user){ + userName = user; + } + + public static void injectGroupNames(List groups){ + groupNames = groups; + } + + public static void injectMode(boolean mode){ + injectMode = mode; + } + + @Override + public String getUserName() { + if (injectMode){ + return userName; + } else { + return hmap.getUserName(); + } + } + + @Override + public List getGroupNames() { + if (injectMode) { + return groupNames; + } else { + return hmap.getGroupNames(); + } + } + + @Override + public Configuration getConf() { + return hmap.getConf(); + } + + @Override + public void setConf(Configuration config) { + try { + hmap = (HiveMetastoreAuthenticationProvider) hmapClass.newInstance(); + } catch (InstantiationException e) { + throw new RuntimeException("Whoops, could not create an Authenticator of class " + + hmapClass.getName()); + } catch (IllegalAccessException e) { + throw new RuntimeException("Whoops, could not create an Authenticator of class " + + hmapClass.getName()); + } + + hmap.setConf(config); + } + + @Override + public void setMetaStoreHandler(HMSHandler handler) { + hmap.setMetaStoreHandler(handler); + } + + @Override + public void destroy() throws HiveException { + hmap.destroy(); + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (revision 1419361) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (working copy) @@ -321,10 +321,10 @@ @SuppressWarnings("unchecked") public static HiveAuthorizationProvider getAuthorizeProviderManager( - Configuration conf, HiveAuthenticationProvider authenticator) throws HiveException { + Configuration conf, HiveConf.ConfVars authorizationProviderConfKey, + HiveAuthenticationProvider authenticator) throws HiveException { - String clsStr = HiveConf.getVar(conf, - HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey); HiveAuthorizationProvider ret = null; try { @@ -346,11 +346,11 @@ } @SuppressWarnings("unchecked") - public static HiveAuthenticationProvider getAuthenticator(Configuration conf) - throws HiveException { + public static HiveAuthenticationProvider getAuthenticator( + Configuration conf, HiveConf.ConfVars authenticatorConfKey + ) throws HiveException { - String clsStr = HiveConf.getVar(conf, - HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER); + String clsStr = HiveConf.getVar(conf, authenticatorConfKey); HiveAuthenticationProvider ret = null; try { @@ -370,6 
+370,7 @@ return ret; } + /** * Convert FieldSchemas to columnNames with backticks around them. */ Index: ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (revision 1419361) +++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (working copy) @@ -282,10 +282,11 @@ } try { - startSs.authenticator = HiveUtils.getAuthenticator(startSs - .getConf()); - startSs.authorizer = HiveUtils.getAuthorizeProviderManager(startSs - .getConf(), startSs.authenticator); + startSs.authenticator = HiveUtils.getAuthenticator( + startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER); + startSs.authorizer = HiveUtils.getAuthorizeProviderManager( + startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, + startSs.authenticator); startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs .getConf()); } catch (HiveException e) { Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java (revision 0) @@ -0,0 +1,276 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.MetaStorePreEventListener; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.metastore.TableType; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.InvalidOperationException; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent; +import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent; +import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent; +import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent; +import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent; +import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent; +import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent; +import org.apache.hadoop.hive.metastore.events.PreDropTableEvent; +import org.apache.hadoop.hive.metastore.events.PreEventContext; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.HiveUtils; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider; + +/** + * AuthorizationPreEventListener : A MetaStorePreEventListener that + * performs authorization/authentication checks on the metastore-side. + * + * Note that this can only perform authorization checks on defined + * metastore PreEventContexts, such as the adding/dropping and altering + * of databases, tables and partitions. 
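+ *
+ * A typical configuration enabling metastore-side authorization (using the
+ * defaults added to hive-default.xml.template by this patch) would be:
+ *   hive.metastore.pre.event.listeners =
+ *     org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener
+ *   hive.security.metastore.authorization.manager =
+ *     org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider
+ *   hive.security.metastore.authenticator.manager =
+ *     org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator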
+ */ +public class AuthorizationPreEventListener extends MetaStorePreEventListener { + + public static final Log LOG = LogFactory.getLog( + AuthorizationPreEventListener.class); + + private static HiveConf conf; + private static HiveMetastoreAuthorizationProvider authorizer; + private static HiveMetastoreAuthenticationProvider authenticator; + + public AuthorizationPreEventListener(Configuration config) throws HiveException { + super(config); + + authenticator = (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator( + config, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER); + authorizer = (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager( + config, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator); + } + + @Override + public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectException, + InvalidOperationException { + + authenticator.setMetaStoreHandler(context.getHandler()); + authorizer.setMetaStoreHandler(context.getHandler()); + + switch (context.getEventType()) { + case CREATE_TABLE: + authorizeCreateTable((PreCreateTableEvent)context); + break; + case DROP_TABLE: + authorizeDropTable((PreDropTableEvent)context); + break; + case ALTER_TABLE: + authorizeAlterTable((PreAlterTableEvent)context); + break; + case ADD_PARTITION: + authorizeAddPartition((PreAddPartitionEvent)context); + break; + case DROP_PARTITION: + authorizeDropPartition((PreDropPartitionEvent)context); + break; + case ALTER_PARTITION: + authorizeAlterPartition((PreAlterPartitionEvent)context); + break; + case CREATE_DATABASE: + authorizeCreateDatabase((PreCreateDatabaseEvent)context); + break; + case DROP_DATABASE: + authorizeDropDatabase((PreDropDatabaseEvent)context); + break; + case LOAD_PARTITION_DONE: + // noop for now + break; + default: + break; + } + + } + + private void authorizeCreateDatabase(PreCreateDatabaseEvent context) + throws InvalidOperationException, MetaException { + try { + authorizer.authorize(new Database(context.getDatabase()), + HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(), + HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges()); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeDropDatabase(PreDropDatabaseEvent context) + throws InvalidOperationException, MetaException { + try { + authorizer.authorize(new Database(context.getDatabase()), + HiveOperation.DROPDATABASE.getInputRequiredPrivileges(), + HiveOperation.DROPDATABASE.getOutputRequiredPrivileges()); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeCreateTable(PreCreateTableEvent context) + throws InvalidOperationException, MetaException { + try { + authorizer.authorize(getTableFromApiTable(context.getTable()), + HiveOperation.CREATETABLE.getInputRequiredPrivileges(), + HiveOperation.CREATETABLE.getOutputRequiredPrivileges()); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeDropTable(PreDropTableEvent context) + throws InvalidOperationException, MetaException { + try { + authorizer.authorize(getTableFromApiTable(context.getTable()), + HiveOperation.DROPTABLE.getInputRequiredPrivileges(), + HiveOperation.DROPTABLE.getOutputRequiredPrivileges()); + } catch (AuthorizationException e) { + throw 
invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeAlterTable(PreAlterTableEvent context) + throws InvalidOperationException, MetaException { + try { + authorizer.authorize(getTableFromApiTable(context.getOldTable()), + null, + new Privilege[]{Privilege.ALTER_METADATA}); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeAddPartition(PreAddPartitionEvent context) + throws InvalidOperationException, MetaException { + try { + org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition(); + authorizer.authorize(getPartitionFromApiPartition(mapiPart, context), + HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(), + HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges()); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (NoSuchObjectException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeDropPartition(PreDropPartitionEvent context) + throws InvalidOperationException, MetaException { + try { + org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition(); + authorizer.authorize(getPartitionFromApiPartition(mapiPart, context), + HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(), + HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges()); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (NoSuchObjectException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private void authorizeAlterPartition(PreAlterPartitionEvent context) + throws InvalidOperationException, MetaException { + try { + org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getNewPartition(); + authorizer.authorize(getPartitionFromApiPartition(mapiPart, context), + null, + new Privilege[]{Privilege.ALTER_METADATA}); + } catch (AuthorizationException e) { + throw invalidOperationException(e); + } catch (NoSuchObjectException e) { + throw invalidOperationException(e); + } catch (HiveException e) { + throw metaException(e); + } + } + + private Table getTableFromApiTable(org.apache.hadoop.hive.metastore.api.Table apiTable) { + org.apache.hadoop.hive.metastore.api.Table tTable = apiTable.deepCopy(); + if (tTable.getTableType() == null){ + // TableType specified was null, we need to figure out what type it was. 
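+      // Inference order matters here: the EXTERNAL table parameter wins first,
+      // then index-table markers; a table with no SD or no location must be a
+      // view, and everything else defaults to a managed table.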
+ if (MetaStoreUtils.isExternalTable(tTable)){ + tTable.setTableType(TableType.EXTERNAL_TABLE.toString()); + } else if (MetaStoreUtils.isIndexTable(tTable)) { + tTable.setTableType(TableType.INDEX_TABLE.toString()); + } else if ((tTable.getSd() == null) || (tTable.getSd().getLocation() == null)) { + tTable.setTableType(TableType.VIRTUAL_VIEW.toString()); + } else { + tTable.setTableType(TableType.MANAGED_TABLE.toString()); + } + } + Table tbl = new Table(tTable); + return tbl; + } + + private Partition getPartitionFromApiPartition( + org.apache.hadoop.hive.metastore.api.Partition mapiPart, + PreEventContext context) throws HiveException, NoSuchObjectException, MetaException { + org.apache.hadoop.hive.metastore.api.Partition tPart = mapiPart.deepCopy(); + org.apache.hadoop.hive.metastore.api.Table t = context.getHandler().get_table( + mapiPart.getDbName(), mapiPart.getTableName()); + if (tPart.getSd() == null){ + // In the cases of create partition, by the time this event fires, the partition + // object has not yet come into existence, and thus will not yet have a + // location or an SD, but these are needed to create a ql.metadata.Partition, + // so we use the table's SD. The only place this is used is by the + // authorization hooks, so we will not affect code flow in the metastore itself. + tPart.setSd(t.getSd()); + } + return new Partition(getTableFromApiTable(t),tPart); + } + + private InvalidOperationException invalidOperationException(Exception e) { + InvalidOperationException ex = new InvalidOperationException(e.getMessage()); + ex.initCause(e.getCause()); + return ex; + } + + private MetaException metaException(HiveException e) { + MetaException ex = new MetaException(e.getMessage()); + ex.initCause(e); + return ex; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java (revision 0) @@ -0,0 +1,338 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.security.AccessControlException; +import java.util.EnumSet; +import java.util.List; + +import javax.security.auth.login.LoginException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; + +/** + * StorageBasedAuthorizationProvider is an implementation of + * HiveMetastoreAuthorizationProvider that tries to look at the hdfs + * permissions of files and directories associated with objects like + * databases, tables and partitions to determine whether or not an + * operation is allowed. The rule of thumb for which location to check + * in hdfs is as follows: + * + * CREATE : on location specified, or on location determined from metadata + * READS : not checked (the preeventlistener does not have an event to fire) + * UPDATES : on location in metadata + * DELETES : on location in metadata + * + * If the location does not yet exist, as the case is with creates, it steps + * out to the parent directory recursively to determine its permissions till + * it finds a parent that does exist. + */ +public class StorageBasedAuthorizationProvider extends HiveAuthorizationProviderBase + implements HiveMetastoreAuthorizationProvider { + + private Warehouse wh; + + @Override + public void init(Configuration conf) throws HiveException { + hive_db = new HiveProxy(); + } + + @Override + public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + // Currently not used in hive code-base, but intended to authorize actions + // that are directly user-level. As there's no storage based aspect to this, + // we can follow one of two routes: + // a) We can allow by default - that way, this call stays out of the way + // b) We can deny by default - that way, no privileges are authorized that + // is not understood and explicitly allowed. + // Both approaches have merit, but given that things like grants and revokes + // that are user-level do not make sense from the context of storage-permission + // based auth, denying seems to be more canonical here. 
+ + throw new AuthorizationException(StorageBasedAuthorizationProvider.class.getName() + + " does not allow user-level authorization"); + } + + @Override + public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + Path path = getDbLocation(db); + authorize(path, readRequiredPriv, writeRequiredPriv); + } + + @Override + public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + + // Table path can be null in the case of a new create table - in this case, + // we try to determine what the path would be after the create table is issued. + Path path = null; + try { + String location = table.getTTable().getSd().getLocation(); + if (location == null || location.isEmpty()) { + path = wh.getTablePath(hive_db.getDatabase(table.getDbName()), table.getTableName()); + } else { + path = new Path(location); + } + } catch (MetaException ex) { + throw hiveException(ex); + } + + authorize(path, readRequiredPriv, writeRequiredPriv); + } + + @Override + public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv); + } + + private void authorize(Table table, Partition part, Privilege[] readRequiredPriv, + Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + + // Partition path can be null in the case of a new create partition - in this case, + // we try to default to checking the permissions of the parent table + if (part.getLocation() == null) { + authorize(table, readRequiredPriv, writeRequiredPriv); + } else { + authorize(part.getPartitionPath(), readRequiredPriv, writeRequiredPriv); + } + } + + @Override + public void authorize(Table table, Partition part, List columns, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, + AuthorizationException { + // In a simple storage-based auth, we have no information about columns + // living in different files, so we do simple partition-auth and ignore + // the columns parameter. 
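+    // Deployments that need genuine column-level enforcement should configure
+    // a provider that understands columns instead (the grant-based default
+    // provider does).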
+
+  @Override
+  public void authorize(Table table, Partition part, List<String> columns,
+      Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException,
+      AuthorizationException {
+    // In simple storage-based authorization we have no information about
+    // columns living in different files, so we perform a simple
+    // partition-level authorization and ignore the columns parameter.
+
+    authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv);
+  }
+
+  @Override
+  public void setMetaStoreHandler(HMSHandler handler) {
+    hive_db.setHandler(handler);
+    this.wh = handler.getWh();
+  }
+
+  /**
+   * Given a privilege, return the FsAction it requires.
+   */
+  protected FsAction getFsAction(Privilege priv) {
+
+    switch (priv.getPriv()) {
+    case ALL:
+      return FsAction.READ_WRITE;
+    case ALTER_DATA:
+      return FsAction.WRITE;
+    case ALTER_METADATA:
+      return FsAction.WRITE;
+    case CREATE:
+      return FsAction.WRITE;
+    case DROP:
+      return FsAction.WRITE;
+    case INDEX:
+      throw new AuthorizationException(
+          "StorageBasedAuthorizationProvider cannot handle INDEX privilege");
+    case LOCK:
+      throw new AuthorizationException(
+          "StorageBasedAuthorizationProvider cannot handle LOCK privilege");
+    case SELECT:
+      return FsAction.READ;
+    case SHOW_DATABASE:
+      return FsAction.READ;
+    case UNKNOWN:
+    default:
+      throw new AuthorizationException("Unknown privilege");
+    }
+  }
+
+  /**
+   * Given a Privilege[], find out which FsActions are required.
+   */
+  protected EnumSet<FsAction> getFsActions(Privilege[] privs) {
+    EnumSet<FsAction> actions = EnumSet.noneOf(FsAction.class);
+
+    if (privs == null) {
+      return actions;
+    }
+
+    for (Privilege priv : privs) {
+      actions.add(getFsAction(priv));
+    }
+
+    return actions;
+  }
+
+  /**
+   * Authorize the required privileges against a path.
+   *
+   * @param path
+   *          a filesystem path
+   * @param readRequiredPriv
+   *          a list of privileges needed for inputs.
+   * @param writeRequiredPriv
+   *          a list of privileges needed for outputs.
+   */
+  public void authorize(Path path, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+      throws HiveException, AuthorizationException {
+    try {
+      EnumSet<FsAction> actions = getFsActions(readRequiredPriv);
+      actions.addAll(getFsActions(writeRequiredPriv));
+      if (actions.isEmpty()) {
+        return;
+      }
+
+      checkPermissions(getConf(), path, actions);
+
+    } catch (AccessControlException ex) {
+      throw authorizationException(ex);
+    } catch (LoginException ex) {
+      throw authorizationException(ex);
+    } catch (IOException ex) {
+      throw hiveException(ex);
+    }
+  }
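+
+  /*
+   * Usage sketch (hypothetical path): a CREATE TABLE resolving to
+   * /warehouse/web.db/t, where 't' does not exist yet, climbs to the first
+   * existing ancestor (/warehouse/web.db) and requires WRITE there:
+   *
+   *   authorize(new Path("/warehouse/web.db/t"),
+   *       null, new Privilege[] { Privilege.CREATE });
+   */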
+
+  /**
+   * Checks the permissions for the given path and the current user on the
+   * Hadoop FS. If the given path does not exist, it checks the nearest
+   * existing ancestor instead.
+   */
+  protected void checkPermissions(final Configuration conf, final Path path,
+      final EnumSet<FsAction> actions) throws IOException, LoginException {
+
+    if (path == null) {
+      throw new IllegalArgumentException("path is null");
+    }
+
+    final FileSystem fs = path.getFileSystem(conf);
+
+    if (fs.exists(path)) {
+      checkPermissions(fs, path, actions,
+          authenticator.getUserName(), authenticator.getGroupNames());
+    } else if (path.getParent() != null) {
+      // find the ancestor which exists to check its permissions
+      Path par = path.getParent();
+      while (par != null) {
+        if (fs.exists(par)) {
+          break;
+        }
+        par = par.getParent();
+      }
+
+      checkPermissions(fs, par, actions,
+          authenticator.getUserName(), authenticator.getGroupNames());
+    }
+  }
+
+  /**
+   * Checks the permissions for the given path and the given user and groups
+   * on the Hadoop FS. If the given path does not exist, it returns.
+   */
+  @SuppressWarnings("deprecation")
+  protected static void checkPermissions(final FileSystem fs, final Path path,
+      final EnumSet<FsAction> actions, String user, List<String> groups) throws IOException,
+      AccessControlException {
+
+    final FileStatus stat;
+
+    try {
+      stat = fs.getFileStatus(path);
+    } catch (FileNotFoundException fnfe) {
+      // File named by path doesn't exist; nothing to validate.
+      return;
+    } catch (org.apache.hadoop.fs.permission.AccessControlException ace) {
+      // Older Hadoop versions throw this deprecated exception instead.
+      throw accessControlException(ace);
+    }
+
+    final FsPermission dirPerms = stat.getPermission();
+    final String grp = stat.getGroup();
+
+    for (FsAction action : actions) {
+      if (user.equals(stat.getOwner())) {
+        if (dirPerms.getUserAction().implies(action)) {
+          continue;
+        }
+      }
+      if (groups.contains(grp)) {
+        if (dirPerms.getGroupAction().implies(action)) {
+          continue;
+        }
+      }
+      if (dirPerms.getOtherAction().implies(action)) {
+        continue;
+      }
+      throw new AccessControlException("action " + action + " not permitted on path "
+          + path + " for user " + user);
+    }
+  }
+
+  protected Path getDbLocation(Database db) throws HiveException {
+    try {
+      String location = db.getLocationUri();
+      if (location == null) {
+        return wh.getDefaultDatabasePath(db.getName());
+      } else {
+        return wh.getDnsPath(wh.getDatabasePath(db));
+      }
+    } catch (MetaException ex) {
+      throw hiveException(ex);
+    }
+  }
+
+  private HiveException hiveException(Exception e) {
+    // HiveException(e) already records e as the cause; a subsequent
+    // initCause(e) would throw IllegalStateException.
+    return new HiveException(e);
+  }
+
+  private AuthorizationException authorizationException(Exception e) {
+    // Likewise, the cause is already set by the constructor.
+    return new AuthorizationException(e);
+  }
+
+  private static AccessControlException accessControlException(
+      org.apache.hadoop.fs.permission.AccessControlException e) {
+    AccessControlException ace = new AccessControlException(e.getMessage());
+    ace.initCause(e);
+    return ace;
+  }
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java (revision 0)
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+public class DefaultHiveMetastoreAuthorizationProvider extends BitSetCheckedAuthorizationProvider
+    implements HiveMetastoreAuthorizationProvider {
+
+  @Override
+  public void init(Configuration conf) throws HiveException {
+    hive_db = new HiveProxy();
+  }
+
+  @Override
+  public void setMetaStoreHandler(HMSHandler handler) {
+    hive_db.setHandler(handler);
+  }
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java (revision 1419361)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java (working copy)
@@ -18,21 +18,90 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.thrift.TException;
 
 public abstract class HiveAuthorizationProviderBase implements
     HiveAuthorizationProvider {
-
+
+  /**
+   * HiveProxy dispatches metadata calls either through a client-side Hive
+   * object or, when running inside the metastore, directly through the
+   * HMSHandler.
+   */
+  protected class HiveProxy {
+
+    private final Hive hiveClient;
+    private HMSHandler handler;
+
+    public HiveProxy(Hive hive) {
+      this.hiveClient = hive;
+      this.handler = null;
+    }
+
+    public HiveProxy() {
+      this.hiveClient = null;
+      this.handler = null;
+    }
+
+    public void setHandler(HMSHandler handler) {
+      this.handler = handler;
+    }
+
+    public PrincipalPrivilegeSet get_privilege_set(HiveObjectType column, String dbName,
+        String tableName, List<String> partValues, String col, String userName,
+        List<String> groupNames) throws HiveException {
+      if (hiveClient != null) {
+        return hiveClient.get_privilege_set(
+            column, dbName, tableName, partValues, col, userName, groupNames);
+      } else {
+        HiveObjectRef hiveObj = new HiveObjectRef(column, dbName,
+            tableName, partValues, col);
+        try {
+          return handler.get_privilege_set(hiveObj, userName, groupNames);
+        } catch (MetaException e) {
+          throw new HiveException(e);
+        } catch (TException e) {
+          throw new HiveException(e);
+        }
+      }
+    }
+
+    public Database getDatabase(String dbName) throws HiveException {
+      if (hiveClient != null) {
+        return hiveClient.getDatabase(dbName);
+      } else {
+        try {
+          return handler.get_database(dbName);
+        } catch (NoSuchObjectException e) {
+          throw new HiveException(e);
+        } catch (MetaException e) {
+          throw new HiveException(e);
+        }
+      }
+    }
+
+  }
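+
+  /*
+   * E.g.: a client-side provider constructs "new HiveProxy(Hive.get(conf))"
+   * so these calls go through the metastore client, while a metastore-side
+   * provider constructs "new HiveProxy()" and later calls setHandler(handler)
+   * so the same calls are served in-process by the HMSHandler.
+   */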
+
+  protected HiveProxy hive_db;
+
   protected HiveAuthenticationProvider authenticator;
 
-  protected Hive hive_db;
-
   private Configuration conf;
 
+  public static final Log LOG = LogFactory.getLog(
+      HiveAuthorizationProvider.class);
+
   public void setConf(Configuration conf) {
     this.conf = conf;
     try {
@@ -42,10 +111,6 @@
     }
   }
 
-  public void init(Configuration conf) throws HiveException {
-    hive_db = Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class));
-  }
-
   public Configuration getConf() {
     return this.conf;
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java (revision 0)
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+/**
+ * HiveMetastoreAuthorizationProvider : an extension of HiveAuthorizationProvider
+ * that is intended to be called from the metastore side. It will be invoked
+ * by the AuthorizationPreEventListener.
+ *
+ */
+public interface HiveMetastoreAuthorizationProvider extends HiveAuthorizationProvider {
+
+  /**
+   * Allows the invoker of HiveMetastoreAuthorizationProvider to send in a
+   * hive metastore handler that can be used to make calls to test
+   * whether or not authorizations can/will succeed. Intended to be called
+   * before any of the authorize methods are called.
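+   *
+   * A typical metastore-side sequence might look like the following sketch
+   * (illustrative only; how the provider and the handler are wired up is
+   * environment-specific):
+   * <pre>
+   *   HiveMetastoreAuthorizationProvider authz = ...; // the configured provider
+   *   authz.init(conf);
+   *   authz.setMetaStoreHandler(handler); // must precede any authorize() call
+   *   authz.authorize(db, readRequiredPriv, writeRequiredPriv);
+   * </pre>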
+   * @param handler
+   */
+  void setMetaStoreHandler(HMSHandler handler);
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java (revision 1419361)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java (working copy)
@@ -18,478 +18,16 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.HiveObjectType;
-import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
 
 public class DefaultHiveAuthorizationProvider extends
-    HiveAuthorizationProviderBase {
+    BitSetCheckedAuthorizationProvider {
 
-  static class BitSetChecker {
-
-    boolean[] inputCheck = null;
-    boolean[] outputCheck = null;
-
-    public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
-        Privilege[] outputRequiredPriv) {
-      BitSetChecker checker = new BitSetChecker();
-      if (inputRequiredPriv != null) {
-        checker.inputCheck = new boolean[inputRequiredPriv.length];
-        for (int i = 0; i < checker.inputCheck.length; i++) {
-          checker.inputCheck[i] = false;
-        }
-      }
-      if (outputRequiredPriv != null) {
-        checker.outputCheck = new boolean[outputRequiredPriv.length];
-        for (int i = 0; i < checker.outputCheck.length; i++) {
-          checker.outputCheck[i] = false;
-        }
-      }
-
-      return checker;
-    }
-
+  public void init(Configuration conf) throws HiveException {
+    hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class)));
   }
-
-  @Override
-  public void authorize(Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
-
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
-        outputCheck);
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, null, null, null, null);
-  }
-
-  @Override
-  public void authorize(Database db, Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
-
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck);
-
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, db.getName(), null, null, null);
-  }
-
-  @Override
-  public void authorize(Table table, Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException {
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    authorizeUserDBAndTable(table, inputRequiredPriv,
-        outputRequiredPriv, inputCheck, outputCheck);
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, table.getDbName(), table.getTableName(),
-        null, null);
-  }
-
-  @Override
-  public void authorize(Partition part, Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException {
-
-    //if the partition does not have partition level privilege, go to table level.
-    Table table = part.getTable();
-    if (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") == null || ("FALSE"
-        .equalsIgnoreCase(table.getParameters().get(
-            "PARTITION_LEVEL_PRIVILEGE")))) {
-      this.authorize(part.getTable(), inputRequiredPriv, outputRequiredPriv);
-      return;
-    }
-
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    if (authorizeUserDbAndPartition(part, inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck)){
-      return;
-    }
-
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, part.getTable().getDbName(), part
-            .getTable().getTableName(), part.getName(), null);
-  }
-
-  @Override
-  public void authorize(Table table, Partition part, List<String> columns,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
-      throws HiveException {
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    String partName = null;
-    List<String> partValues = null;
-    if (part != null
-        && (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
-            .equalsIgnoreCase(table.getParameters().get(
-                "PARTITION_LEVEL_PRIVILEGE"))))) {
-      partName = part.getName();
-      partValues = part.getValues();
-    }
-
-    if (partValues == null) {
-      if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv,
-          inputCheck, outputCheck)) {
-        return;
-      }
-    } else {
-      if (authorizeUserDbAndPartition(part, inputRequiredPriv,
-          outputRequiredPriv, inputCheck, outputCheck)) {
-        return;
-      }
-    }
-
-    for (String col : columns) {
-
-      BitSetChecker checker2 = BitSetChecker.getBitSetChecker(
-          inputRequiredPriv, outputRequiredPriv);
-      boolean[] inputCheck2 = checker2.inputCheck;
-      boolean[] outputCheck2 = checker2.outputCheck;
-
-      PrincipalPrivilegeSet partColumnPrivileges = hive_db
-          .get_privilege_set(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
-              partValues, col, this.getAuthenticator().getUserName(), this
-                  .getAuthenticator().getGroupNames());
-
-      authorizePrivileges(partColumnPrivileges, inputRequiredPriv, inputCheck2,
-          outputRequiredPriv, outputCheck2);
-
-      if (inputCheck2 != null) {
-        booleanArrayOr(inputCheck2, inputCheck);
-      }
-      if (outputCheck2 != null) {
-        booleanArrayOr(inputCheck2, inputCheck);
-      }
-
-      checkAndThrowAuthorizationException(inputRequiredPriv,
-          outputRequiredPriv, inputCheck2, outputCheck2, table.getDbName(),
-          table.getTableName(), partName, col);
-    }
-  }
-
-  protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv,
-      boolean[] inputCheck, Privilege[] outputRequiredPriv,
-      boolean[] outputCheck) throws HiveException {
-    PrincipalPrivilegeSet privileges = hive_db.get_privilege_set(
-        HiveObjectType.GLOBAL, null, null, null, null, this.getAuthenticator()
-            .getUserName(), this.getAuthenticator().getGroupNames());
-    return authorizePrivileges(privileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck);
-  }
-
-  /**
-   * Check privileges on User and DB. This is used before doing a check on
-   * table/partition objects, first check the user and DB privileges. If it
-   * passed on this check, no need to check against the table/partition hive
-   * object.
-   *
-   * @param db
-   * @param inputRequiredPriv
-   * @param outputRequiredPriv
-   * @param inputCheck
-   * @param outputCheck
-   * @return true if the check on user and DB privilege passed, which means no
-   *         need for privilege check on concrete hive objects.
-   * @throws HiveException
-   */
-  private boolean authorizeUserAndDBPriv(Database db,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-    if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
-        outputCheck)) {
-      return true;
-    }
-
-    PrincipalPrivilegeSet dbPrivileges = hive_db.get_privilege_set(
-        HiveObjectType.DATABASE, db.getName(), null, null, null, this
-            .getAuthenticator().getUserName(), this.getAuthenticator()
-            .getGroupNames());
-
-    if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck)) {
-      return true;
-    }
-
-    return false;
-  }
-
-  /**
-   * Check privileges on User, DB and table objects.
-   *
-   * @param table
-   * @param inputRequiredPriv
-   * @param outputRequiredPriv
-   * @param inputCheck
-   * @param outputCheck
-   * @return true if the check passed
-   * @throws HiveException
-   */
-  private boolean authorizeUserDBAndTable(Table table,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-
-    if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()),
-        inputRequiredPriv, outputRequiredPriv, inputCheck, outputCheck)) {
-      return true;
-    }
-
-    PrincipalPrivilegeSet tablePrivileges = hive_db.get_privilege_set(
-        HiveObjectType.TABLE, table.getDbName(), table.getTableName(), null,
-        null, this.getAuthenticator().getUserName(), this.getAuthenticator()
-            .getGroupNames());
-
-    if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck)) {
-      return true;
-    }
-
-    return false;
-  }
-
-  /**
-   * Check privileges on User, DB and table/Partition objects.
-   *
-   * @param part
-   * @param inputRequiredPriv
-   * @param outputRequiredPriv
-   * @param inputCheck
-   * @param outputCheck
-   * @return true if the check passed
-   * @throws HiveException
-   */
-  private boolean authorizeUserDbAndPartition(Partition part,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-
-    if (authorizeUserAndDBPriv(
-        hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv,
-        outputRequiredPriv, inputCheck, outputCheck)) {
-      return true;
-    }
-
-    PrincipalPrivilegeSet partPrivileges = part.getTPartition().getPrivileges();
-    if (partPrivileges == null) {
-      partPrivileges = hive_db.get_privilege_set(HiveObjectType.PARTITION, part
-          .getTable().getDbName(), part.getTable().getTableName(), part
-          .getValues(), null, this.getAuthenticator().getUserName(), this
-          .getAuthenticator().getGroupNames());
-    }
-
-    if (authorizePrivileges(partPrivileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck)) {
-      return true;
-    }
-
-    return false;
-  }
-
-  protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges,
-      Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv,
-      boolean[] outputCheck) throws HiveException {
-
-    boolean pass = true;
-    if (inputPriv != null) {
-      pass = pass && matchPrivs(inputPriv, privileges, inputCheck);
-    }
-    if (outputPriv != null) {
-      pass = pass && matchPrivs(outputPriv, privileges, outputCheck);
-    }
-    return pass;
-  }
-
-  /**
-   * try to match an array of privileges from user/groups/roles grants.
-   *
-   * @param container
-   */
-  private boolean matchPrivs(Privilege[] inputPriv,
-      PrincipalPrivilegeSet privileges, boolean[] check) {
-
-    if (inputPriv == null)
-      return true;
-
-    if (privileges == null)
-      return false;
-
-    /*
-     * user grants
-     */
-    Set<String> privSet = new HashSet<String>();
-    if (privileges.getUserPrivileges() != null
-        && privileges.getUserPrivileges().size() > 0) {
-      Collection<List<PrivilegeGrantInfo>> privCollection = privileges.getUserPrivileges().values();
-
-      List<String> userPrivs = getPrivilegeStringList(privCollection);
-      if (userPrivs != null && userPrivs.size() > 0) {
-        for (String priv : userPrivs) {
-          if (priv == null || priv.trim().equals(""))
-            continue;
-          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
-            setBooleanArray(check, true);
-            return true;
-          }
-          privSet.add(priv.toLowerCase());
-        }
-      }
-    }
-
-    /*
-     * group grants
-     */
-    if (privileges.getGroupPrivileges() != null
-        && privileges.getGroupPrivileges().size() > 0) {
-      Collection<List<PrivilegeGrantInfo>> groupPrivCollection = privileges
-          .getGroupPrivileges().values();
-      List<String> groupPrivs = getPrivilegeStringList(groupPrivCollection);
-      if (groupPrivs != null && groupPrivs.size() > 0) {
-        for (String priv : groupPrivs) {
-          if (priv == null || priv.trim().equals(""))
-            continue;
-          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
-            setBooleanArray(check, true);
-            return true;
-          }
-          privSet.add(priv.toLowerCase());
-        }
-      }
-    }
-
-    /*
-     * roles grants
-     */
-    if (privileges.getRolePrivileges() != null
-        && privileges.getRolePrivileges().size() > 0) {
-      Collection<List<PrivilegeGrantInfo>> rolePrivsCollection = privileges
-          .getRolePrivileges().values();
-      ;
-      List<String> rolePrivs = getPrivilegeStringList(rolePrivsCollection);
-      if (rolePrivs != null && rolePrivs.size() > 0) {
-        for (String priv : rolePrivs) {
-          if (priv == null || priv.trim().equals(""))
-            continue;
-          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
-            setBooleanArray(check, true);
-            return true;
-          }
-          privSet.add(priv.toLowerCase());
-        }
-      }
-    }
-
-    for (int i = 0; i < inputPriv.length; i++) {
-      String toMatch = inputPriv[i].toString();
-      if (!check[i]) {
-        check[i] = privSet.contains(toMatch.toLowerCase());
-      }
-    }
-
-    return firstFalseIndex(check) <0;
-  }
-
-  private List<String> getPrivilegeStringList(
-      Collection<List<PrivilegeGrantInfo>> privCollection) {
-    List<String> userPrivs = new ArrayList<String>();
-    if (privCollection!= null && privCollection.size()>0) {
-      for (List<PrivilegeGrantInfo> grantList : privCollection) {
-        if (grantList == null){
-          continue;
-        }
-        for (int i = 0; i < grantList.size(); i++) {
-          PrivilegeGrantInfo grant = grantList.get(i);
-          userPrivs.add(grant.getPrivilege());
-        }
-      }
-    }
-    return userPrivs;
-  }
-
-  private static void setBooleanArray(boolean[] check, boolean b) {
-    for (int i = 0; i < check.length; i++) {
-      check[i] = b;
-    }
-  }
-
-  private static void booleanArrayOr(boolean[] output, boolean[] input) {
-    for (int i = 0; i < output.length && i < input.length; i++) {
-      output[i] = output[i] || input[i];
-    }
-  }
-
-  private void checkAndThrowAuthorizationException(
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck,String dbName,
-      String tableName, String partitionName, String columnName) {
-
-    String hiveObject = "{ ";
-    if (dbName != null) {
-      hiveObject = hiveObject + "database:" + dbName;
-    }
-    if (tableName != null) {
-      hiveObject = hiveObject + ", table:" + tableName;
-    }
-    if (partitionName != null) {
-      hiveObject = hiveObject + ", partitionName:" + partitionName;
-    }
-    if (columnName != null) {
-      hiveObject = hiveObject + ", columnName:" + columnName;
-    }
-    hiveObject = hiveObject + "}";
-
-    if (inputCheck != null) {
-      int input = this.firstFalseIndex(inputCheck);
-      if (input >= 0) {
-        throw new AuthorizationException("No privilege '"
-            + inputRequiredPriv[input].toString() + "' found for inputs "
-            + hiveObject);
-      }
-    }
-
-    if (outputCheck != null) {
-      int output = this.firstFalseIndex(outputCheck);
-      if (output >= 0) {
-        throw new AuthorizationException("No privilege '"
-            + outputRequiredPriv[output].toString() + "' found for outputs "
-            + hiveObject);
-      }
-    }
-  }
-
-  private int firstFalseIndex(boolean[] inputCheck) {
-    if (inputCheck != null) {
-      for (int i = 0; i < inputCheck.length; i++) {
-        if (!inputCheck[i]) {
-          return i;
-        }
-      }
-    }
-    return -1;
-  }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java (revision 0)
@@ -0,0 +1,502 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Base class for authorization providers that check explicitly granted
+ * privileges. Each required privilege is tracked as one bit in a boolean
+ * array ("bitset") that is progressively satisfied by user, database,
+ * table, partition and column level grants.
+ */
+public abstract class BitSetCheckedAuthorizationProvider extends
+    HiveAuthorizationProviderBase {
+
+  static class BitSetChecker {
+
+    boolean[] inputCheck = null;
+    boolean[] outputCheck = null;
+
+    public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
+        Privilege[] outputRequiredPriv) {
+      BitSetChecker checker = new BitSetChecker();
+      if (inputRequiredPriv != null) {
+        checker.inputCheck = new boolean[inputRequiredPriv.length];
+        for (int i = 0; i < checker.inputCheck.length; i++) {
+          checker.inputCheck[i] = false;
+        }
+      }
+      if (outputRequiredPriv != null) {
+        checker.outputCheck = new boolean[outputRequiredPriv.length];
+        for (int i = 0; i < checker.outputCheck.length; i++) {
+          checker.outputCheck[i] = false;
+        }
+      }
+
+      return checker;
+    }
+
+  }
+
+  @Override
+  public void authorize(Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
+        outputCheck);
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, null, null, null, null);
+  }
+
+  @Override
+  public void authorize(Database db, Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck);
+
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, db.getName(), null, null, null);
+  }
+
+  @Override
+  public void authorize(Table table, Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    authorizeUserDBAndTable(table, inputRequiredPriv,
+        outputRequiredPriv, inputCheck, outputCheck);
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, table.getDbName(), table.getTableName(),
+        null, null);
+  }
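+
+  /*
+   * Worked sketch (illustrative): a query needing inputRequiredPriv =
+   * { SELECT } on a table starts with inputCheck = { false }; a user-level
+   * grant of SELECT (or ALL) flips the bit at the user scope, otherwise the
+   * DB and table scopes are consulted in turn, and any bit still false at
+   * the end makes checkAndThrowAuthorizationException throw.
+   */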
+
+  @Override
+  public void authorize(Partition part, Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException {
+
+    // If the partition does not have partition-level privileges, go to table level.
+    Table table = part.getTable();
+    if (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") == null || ("FALSE"
+        .equalsIgnoreCase(table.getParameters().get(
+            "PARTITION_LEVEL_PRIVILEGE")))) {
+      this.authorize(part.getTable(), inputRequiredPriv, outputRequiredPriv);
+      return;
+    }
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    if (authorizeUserDbAndPartition(part, inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck)) {
+      return;
+    }
+
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, part.getTable().getDbName(), part
+            .getTable().getTableName(), part.getName(), null);
+  }
+
+  @Override
+  public void authorize(Table table, Partition part, List<String> columns,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
+      throws HiveException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    String partName = null;
+    List<String> partValues = null;
+    if (part != null
+        && (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
+            .equalsIgnoreCase(table.getParameters().get(
+                "PARTITION_LEVEL_PRIVILEGE"))))) {
+      partName = part.getName();
+      partValues = part.getValues();
+    }
+
+    if (partValues == null) {
+      if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv,
+          inputCheck, outputCheck)) {
+        return;
+      }
+    } else {
+      if (authorizeUserDbAndPartition(part, inputRequiredPriv,
+          outputRequiredPriv, inputCheck, outputCheck)) {
+        return;
+      }
+    }
+
+    for (String col : columns) {
+
+      BitSetChecker checker2 = BitSetChecker.getBitSetChecker(
+          inputRequiredPriv, outputRequiredPriv);
+      boolean[] inputCheck2 = checker2.inputCheck;
+      boolean[] outputCheck2 = checker2.outputCheck;
+
+      PrincipalPrivilegeSet partColumnPrivileges = hive_db
+          .get_privilege_set(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
+              partValues, col, this.getAuthenticator().getUserName(), this
+                  .getAuthenticator().getGroupNames());
+
+      authorizePrivileges(partColumnPrivileges, inputRequiredPriv, inputCheck2,
+          outputRequiredPriv, outputCheck2);
+
+      // Carry over the bits already satisfied at the table/partition scope.
+      if (inputCheck2 != null) {
+        booleanArrayOr(inputCheck2, inputCheck);
+      }
+      if (outputCheck2 != null) {
+        booleanArrayOr(outputCheck2, outputCheck);
+      }
+
+      checkAndThrowAuthorizationException(inputRequiredPriv,
+          outputRequiredPriv, inputCheck2, outputCheck2, table.getDbName(),
+          table.getTableName(), partName, col);
+    }
+  }
+
+  protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv,
+      boolean[] inputCheck, Privilege[] outputRequiredPriv,
+      boolean[] outputCheck) throws HiveException {
+    PrincipalPrivilegeSet privileges = hive_db.get_privilege_set(
+        HiveObjectType.GLOBAL, null, null, null, null, this.getAuthenticator()
+            .getUserName(), this.getAuthenticator().getGroupNames());
+    return authorizePrivileges(privileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck);
+  }
+
+  /**
+   * Check privileges at the user and DB levels. This is used before checking
+   * table/partition objects: first check the user and DB privileges; if the
+   * check passes there, there is no need to check against the individual
+   * table/partition hive objects.
+   *
+   * @param db
+   * @param inputRequiredPriv
+   * @param outputRequiredPriv
+   * @param inputCheck
+   * @param outputCheck
+   * @return true if the check on user and DB privileges passed, which means
+   *         no privilege check on concrete hive objects is needed.
+   * @throws HiveException
+   */
+  private boolean authorizeUserAndDBPriv(Database db,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+    if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
+        outputCheck)) {
+      return true;
+    }
+
+    PrincipalPrivilegeSet dbPrivileges = hive_db.get_privilege_set(
+        HiveObjectType.DATABASE, db.getName(), null, null, null, this
+            .getAuthenticator().getUserName(), this.getAuthenticator()
+            .getGroupNames());
+
+    if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck)) {
+      return true;
+    }
+
+    return false;
+  }
+
+  /**
+   * Check privileges on the user, DB and table objects.
+   *
+   * @param table
+   * @param inputRequiredPriv
+   * @param outputRequiredPriv
+   * @param inputCheck
+   * @param outputCheck
+   * @return true if the check passed
+   * @throws HiveException
+   */
+  private boolean authorizeUserDBAndTable(Table table,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+
+    if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()),
+        inputRequiredPriv, outputRequiredPriv, inputCheck, outputCheck)) {
+      return true;
+    }
+
+    PrincipalPrivilegeSet tablePrivileges = hive_db.get_privilege_set(
+        HiveObjectType.TABLE, table.getDbName(), table.getTableName(), null,
+        null, this.getAuthenticator().getUserName(), this.getAuthenticator()
+            .getGroupNames());
+
+    if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck)) {
+      return true;
+    }
+
+    return false;
+  }
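+
+  /*
+   * Check-order sketch: callers consult user-level grants first, then
+   * database, then table (and then partition/column where applicable); the
+   * first scope that satisfies every requested privilege short-circuits
+   * with true, so broader grants avoid the finer-grained lookups.
+   */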
+
+  /**
+   * Check privileges on the user, DB and table/partition objects.
+   *
+   * @param part
+   * @param inputRequiredPriv
+   * @param outputRequiredPriv
+   * @param inputCheck
+   * @param outputCheck
+   * @return true if the check passed
+   * @throws HiveException
+   */
+  private boolean authorizeUserDbAndPartition(Partition part,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+
+    if (authorizeUserAndDBPriv(
+        hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv,
+        outputRequiredPriv, inputCheck, outputCheck)) {
+      return true;
+    }
+
+    PrincipalPrivilegeSet partPrivileges = part.getTPartition().getPrivileges();
+    if (partPrivileges == null) {
+      partPrivileges = hive_db.get_privilege_set(HiveObjectType.PARTITION, part
+          .getTable().getDbName(), part.getTable().getTableName(), part
+          .getValues(), null, this.getAuthenticator().getUserName(), this
+          .getAuthenticator().getGroupNames());
+    }
+
+    if (authorizePrivileges(partPrivileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck)) {
+      return true;
+    }
+
+    return false;
+  }
+
+  protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges,
+      Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv,
+      boolean[] outputCheck) throws HiveException {
+
+    boolean pass = true;
+    if (inputPriv != null) {
+      pass = pass && matchPrivs(inputPriv, privileges, inputCheck);
+    }
+    if (outputPriv != null) {
+      pass = pass && matchPrivs(outputPriv, privileges, outputCheck);
+    }
+    return pass;
+  }
+
+  /**
+   * Try to match an array of required privileges against the privileges a
+   * principal holds through user, group and role grants.
+   *
+   * @param inputPriv the privileges being requested
+   * @param privileges the grant set to match against
+   * @param check bit array updated in place, one bit per requested privilege
+   */
+  private boolean matchPrivs(Privilege[] inputPriv,
+      PrincipalPrivilegeSet privileges, boolean[] check) {
+
+    if (inputPriv == null) {
+      return true;
+    }
+
+    if (privileges == null) {
+      return false;
+    }
+
+    /*
+     * user grants
+     */
+    Set<String> privSet = new HashSet<String>();
+    if (privileges.getUserPrivileges() != null
+        && privileges.getUserPrivileges().size() > 0) {
+      Collection<List<PrivilegeGrantInfo>> privCollection = privileges.getUserPrivileges().values();
+
+      List<String> userPrivs = getPrivilegeStringList(privCollection);
+      if (userPrivs != null && userPrivs.size() > 0) {
+        for (String priv : userPrivs) {
+          if (priv == null || priv.trim().equals("")) {
+            continue;
+          }
+          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+            setBooleanArray(check, true);
+            return true;
+          }
+          privSet.add(priv.toLowerCase());
+        }
+      }
+    }
+
+    /*
+     * group grants
+     */
+    if (privileges.getGroupPrivileges() != null
+        && privileges.getGroupPrivileges().size() > 0) {
+      Collection<List<PrivilegeGrantInfo>> groupPrivCollection = privileges
+          .getGroupPrivileges().values();
+      List<String> groupPrivs = getPrivilegeStringList(groupPrivCollection);
+      if (groupPrivs != null && groupPrivs.size() > 0) {
+        for (String priv : groupPrivs) {
+          if (priv == null || priv.trim().equals("")) {
+            continue;
+          }
+          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+            setBooleanArray(check, true);
+            return true;
+          }
+          privSet.add(priv.toLowerCase());
+        }
+      }
+    }
+
+    /*
+     * role grants
+     */
+    if (privileges.getRolePrivileges() != null
+        && privileges.getRolePrivileges().size() > 0) {
+      Collection<List<PrivilegeGrantInfo>> rolePrivsCollection = privileges
+          .getRolePrivileges().values();
+      List<String> rolePrivs = getPrivilegeStringList(rolePrivsCollection);
+      if (rolePrivs != null && rolePrivs.size() > 0) {
+        for (String priv : rolePrivs) {
+          if (priv == null || priv.trim().equals("")) {
+            continue;
+          }
+          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+            setBooleanArray(check, true);
+            return true;
+          }
+          privSet.add(priv.toLowerCase());
+        }
+      }
+    }
+
+    for (int i = 0; i < inputPriv.length; i++) {
+      String toMatch = inputPriv[i].toString();
+      if (!check[i]) {
+        check[i] = privSet.contains(toMatch.toLowerCase());
+      }
+    }
+
+    return firstFalseIndex(check) < 0;
+  }
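+
+  /*
+   * E.g. (illustrative): requested = { SELECT, DROP }, user grants contain
+   * "select" and a role grant contains "drop" - both bits end up true and
+   * matchPrivs returns true; a grant of ALL at any principal level
+   * short-circuits to true immediately.
+   */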
+
+  private List<String> getPrivilegeStringList(
+      Collection<List<PrivilegeGrantInfo>> privCollection) {
+    List<String> userPrivs = new ArrayList<String>();
+    if (privCollection != null && privCollection.size() > 0) {
+      for (List<PrivilegeGrantInfo> grantList : privCollection) {
+        if (grantList == null) {
+          continue;
+        }
+        for (int i = 0; i < grantList.size(); i++) {
+          PrivilegeGrantInfo grant = grantList.get(i);
+          userPrivs.add(grant.getPrivilege());
+        }
+      }
+    }
+    return userPrivs;
+  }
+
+  private static void setBooleanArray(boolean[] check, boolean b) {
+    for (int i = 0; i < check.length; i++) {
+      check[i] = b;
+    }
+  }
+
+  private static void booleanArrayOr(boolean[] output, boolean[] input) {
+    for (int i = 0; i < output.length && i < input.length; i++) {
+      output[i] = output[i] || input[i];
+    }
+  }
+
+  private void checkAndThrowAuthorizationException(
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck, String dbName,
+      String tableName, String partitionName, String columnName) {
+
+    String hiveObject = "{ ";
+    if (dbName != null) {
+      hiveObject = hiveObject + "database:" + dbName;
+    }
+    if (tableName != null) {
+      hiveObject = hiveObject + ", table:" + tableName;
+    }
+    if (partitionName != null) {
+      hiveObject = hiveObject + ", partitionName:" + partitionName;
+    }
+    if (columnName != null) {
+      hiveObject = hiveObject + ", columnName:" + columnName;
+    }
+    hiveObject = hiveObject + "}";
+
+    if (inputCheck != null) {
+      int input = this.firstFalseIndex(inputCheck);
+      if (input >= 0) {
+        throw new AuthorizationException("No privilege '"
+            + inputRequiredPriv[input].toString() + "' found for inputs "
+            + hiveObject);
+      }
+    }
+
+    if (outputCheck != null) {
+      int output = this.firstFalseIndex(outputCheck);
+      if (output >= 0) {
+        throw new AuthorizationException("No privilege '"
+            + outputRequiredPriv[output].toString() + "' found for outputs "
+            + hiveObject);
+      }
+    }
+  }
+
+  private int firstFalseIndex(boolean[] inputCheck) {
+    if (inputCheck != null) {
+      for (int i = 0; i < inputCheck.length; i++) {
+        if (!inputCheck[i]) {
+          return i;
+        }
+      }
+    }
+    return -1;
+  }
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (revision 1419361)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (working copy)
@@ -30,7 +30,7 @@
 
   private String userName;
   private List<String> groupNames;
-
+
   private Configuration conf;
 
   @Override
Index: ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java (revision 0)
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+/**
+ * HiveMetastoreAuthenticationProvider is an extension of
+ * HiveAuthenticationProvider for authentication from the metastore side.
+ * The implementation should return userNames and groupNames, and must take
+ * care that if the metastore is running a particular command as a given
+ * user, it is that user which is returned.
+ */
+public interface HiveMetastoreAuthenticationProvider extends HiveAuthenticationProvider {
+
+  /**
+   * Allows the invoker of HiveMetastoreAuthenticationProvider to send in a
+   * hive metastore handler that can be used to provide data for any
+   * authentication that needs to be done.
+   * @param handler
+   */
+  void setMetaStoreHandler(HMSHandler handler);
+
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java (revision 0)
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+public class HadoopDefaultMetastoreAuthenticator extends HadoopDefaultAuthenticator
+    implements HiveMetastoreAuthenticationProvider {
+
+  @Override
+  public void setMetaStoreHandler(HMSHandler handler) {
+    setConf(handler.getHiveConf());
+  }
+
+}