diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 6d2748e..4f99f92 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -149,6 +149,7 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, HiveConf.ConfVars.METASTORE_EVENT_CLEAN_FREQ, HiveConf.ConfVars.METASTORE_EVENT_EXPIRY_DURATION, + HiveConf.ConfVars.METASTORE_FILTER_HOOK, HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS, HiveConf.ConfVars.METASTORE_PART_INHERIT_TBL_PROPS, @@ -592,7 +593,8 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { "List of comma separated keys occurring in table properties which will get inherited to newly created partitions. \n" + "* implies all the keys will get inherited."), METASTORE_FILTER_HOOK("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl", - "Metastore hook class for filtering the metadata read results"), + "Metastore hook class for filtering the metadata read results. If hive.security.authorization.manager " + + "is set to an instance of HiveAuthorizerFactory, then this value is ignored."), // Parameters for exporting metadata on table drop (requires the use of the) // org.apache.hadoop.hive.ql.parse.MetaDataExportListener preevent listener diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java index cceac93..4ceee8c 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java @@ -17,19 +17,19 @@ */ package org.apache.hadoop.hive.metastore; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.List; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -53,7 +53,7 @@ public DummyMetaStoreFilterHookImpl(HiveConf conf) { } @Override - public List<String> filterDatabases(List<String> dbList) { + public List<String> filterDatabases(List<String> dbList) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -69,7 +69,7 @@ public Database filterDatabase(Database dataBase) throws NoSuchObjectException { } @Override - public List<String> filterTableNames(String dbName, List<String> tableList) { + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -85,7 +85,7 @@ public Table filterTable(Table table) throws NoSuchObjectException { } @Override - public List<Table> filterTables(List<Table> tableList) { + public List<Table> filterTables(List<Table> tableList) throws MetaException { if (blockResults) { return new ArrayList<Table>
(); } @@ -93,7 +93,7 @@ public Table filterTable(Table table) throws NoSuchObjectException { } @Override - public List<Partition> filterPartitions(List<Partition> partitionList) { + public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException { if (blockResults) { return new ArrayList<Partition>(); } @@ -102,7 +102,7 @@ public Table filterTable(Table table) throws NoSuchObjectException { @Override public List<PartitionSpec> filterPartitionSpecs( - List<PartitionSpec> partitionSpecList) { + List<PartitionSpec> partitionSpecList) throws MetaException { if (blockResults) { return new ArrayList<PartitionSpec>(); } @@ -119,7 +119,7 @@ public Partition filterPartition(Partition partition) throws NoSuchObjectExcepti @Override public List<String> filterPartitionNames(String dbName, String tblName, - List<String> partitionNames) { + List<String> partitionNames) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -136,7 +136,7 @@ public Index filterIndex(Index index) throws NoSuchObjectException { @Override public List<String> filterIndexNames(String dbName, String tblName, - List<String> indexList) { + List<String> indexList) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -144,7 +144,7 @@ public Index filterIndex(Index index) throws NoSuchObjectException { } @Override - public List<Index> filterIndexes(List<Index> indexeList) { + public List<Index> filterIndexes(List<Index> indexeList) throws MetaException { if (blockResults) { return new ArrayList<Index>(); } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java new file mode 100644 index 0000000..c4dccba --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java @@ -0,0 +1,275 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.any; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * Test HiveAuthorizer api invocation + */ +public class TestHiveAuthorizerShowFilters { + protected static HiveConf conf; + protected static Driver driver; + private static final String tableName1 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "table1") + .toLowerCase(); + private static final String tableName2 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "table2") + .toLowerCase(); + private static final String dbName1 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "db1") + .toLowerCase(); + private static final String dbName2 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "db2") + .toLowerCase(); + + static HiveAuthorizer mockedAuthorizer; + + static final List<String> AllTables = getSortedList(tableName1, tableName2); + static final List<String> AllDbs = getSortedList("default", dbName1, dbName2); + + private static List<HivePrivilegeObject> filterArguments = null; + private static List<HivePrivilegeObject> filteredResults = new ArrayList<HivePrivilegeObject>(); + + /** + * This factory creates a mocked HiveAuthorizer class. The mocked class is + * used to capture the argument passed to HiveAuthorizer.filterListCmdObjects.
+ * It returns the filteredResults object for calls to + * HiveAuthorizer.filterListCmdObjects, and stores the list argument in + * filterArguments + */ + static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory { + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) { + Mockito.validateMockitoUsage(); + + abstract class AuthorizerWithFilterCmdImpl implements HiveAuthorizer { + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { + // capture arguments in static + filterArguments = listObjs; + // return the static filteredResults variable if it has been set to + // some set of values; + // otherwise return the arguments unchanged + if (filteredResults.size() == 0) { + return filterArguments; + } + return filteredResults; + } + } + + mockedAuthorizer = Mockito.mock(AuthorizerWithFilterCmdImpl.class, Mockito.withSettings() + .verboseLogging()); + + try { + Mockito.when( + mockedAuthorizer.filterListCmdObjects((List<HivePrivilegeObject>) any(), + (HiveAuthzContext) any())).thenCallRealMethod(); + } catch (Exception e) { + org.junit.Assert.fail("Caught exception " + e); + } + return mockedAuthorizer; + } + + } + + @BeforeClass + public static void beforeTest() throws Exception { + conf = new HiveConf(); + + // Turn on mocked authorization + conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); + conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); + conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); + conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + + SessionState.start(conf); + driver = new Driver(conf); + runCmd("create table " + tableName1 + + " (i int, j int, k string) partitioned by (city string, date string) "); + runCmd("create table " + tableName2 + "(i int)"); + + runCmd("create database " + dbName1); + runCmd("create database " + dbName2); + + } + + @Before + public void setup() { + filterArguments = null; + filteredResults.clear(); + } + + @AfterClass + public static void afterTests() throws Exception { + // Drop the tables when we're done.
This makes the test work inside an IDE + runCmd("drop table if exists " + tableName1); + runCmd("drop table if exists " + tableName2); + runCmd("drop database if exists " + dbName1); + runCmd("drop database if exists " + dbName2); + driver.close(); + } + + @Test + public void testShowDatabasesAll() throws HiveAuthzPluginException, HiveAccessControlException, + CommandNeedRetryException, IOException { + runShowDbTest(AllDbs); + } + + @Test + public void testShowDatabasesSelected() throws HiveAuthzPluginException, + HiveAccessControlException, CommandNeedRetryException, IOException { + setFilteredResults(HivePrivilegeObjectType.DATABASE, dbName2); + runShowDbTest(Arrays.asList(dbName2)); + } + + private void runShowDbTest(List<String> expectedDbList) throws HiveAuthzPluginException, + HiveAccessControlException, CommandNeedRetryException, IOException { + runCmd("show databases"); + verifyAllDb(); + assertEquals("filtered result check ", expectedDbList, getSortedResults()); + } + + @Test + public void testShowTablesAll() throws HiveAuthzPluginException, HiveAccessControlException, + CommandNeedRetryException, IOException { + runShowTablesTest(AllTables); + } + + @Test + public void testShowTablesSelected() throws HiveAuthzPluginException, HiveAccessControlException, + CommandNeedRetryException, IOException { + setFilteredResults(HivePrivilegeObjectType.TABLE_OR_VIEW, tableName2); + runShowTablesTest(Arrays.asList(tableName2)); + } + + private void runShowTablesTest(List<String> expectedTabs) throws IOException, + CommandNeedRetryException, HiveAuthzPluginException, HiveAccessControlException { + runCmd("show tables"); + verifyAllTables(); + assertEquals("filtered result check ", expectedTabs, getSortedResults()); + } + + private List<String> getSortedResults() throws IOException, CommandNeedRetryException { + List<String> res = new ArrayList<String>(); + // fetch the results of the last command + driver.getResults(res); + Collections.sort(res); + return res; + } + + /** + * Verify that the arguments passed to HiveAuthorizer.filterListCmdObjects are of + * type DATABASE and contain all databases. + * + * @throws HiveAccessControlException + * @throws HiveAuthzPluginException + */ + private void verifyAllDb() throws HiveAuthzPluginException, HiveAccessControlException { + List<HivePrivilegeObject> privObjs = filterArguments; + + // get the db names out + List<String> dbArgs = new ArrayList<String>(); + for (HivePrivilegeObject privObj : privObjs) { + assertEquals("Priv object type should be db", HivePrivilegeObjectType.DATABASE, + privObj.getType()); + dbArgs.add(privObj.getDbname()); + } + + // sort before comparing with expected results + Collections.sort(dbArgs); + assertEquals("All db should be passed as arguments", AllDbs, dbArgs); + } + + /** + * Verify that the arguments passed to HiveAuthorizer.filterListCmdObjects are of + * type TABLE and contain all tables.
+ * + * @throws HiveAccessControlException + * @throws HiveAuthzPluginException + */ + private void verifyAllTables() throws HiveAuthzPluginException, HiveAccessControlException { + List<HivePrivilegeObject> privObjs = filterArguments; + + // get the table names out + List<String> tables = new ArrayList<String>(); + for (HivePrivilegeObject privObj : privObjs) { + assertEquals("Priv object type should be table", HivePrivilegeObjectType.TABLE_OR_VIEW, + privObj.getType()); + assertEquals("Database name", "default", privObj.getDbname()); + tables.add(privObj.getObjectName()); + } + + // sort before comparing with expected results + Collections.sort(tables); + assertEquals("All tables should be passed as arguments", AllTables, tables); + } + + private static void setFilteredResults(HivePrivilegeObjectType type, String... objs) { + filteredResults.clear(); + for (String obj : objs) { + String dbname; + String tabname = null; + if (type == HivePrivilegeObjectType.DATABASE) { + dbname = obj; + } else { + dbname = "default"; + tabname = obj; + } + filteredResults.add(new HivePrivilegeObject(type, dbname, tabname)); + } + } + + private static void runCmd(String cmd) throws CommandNeedRetryException { + CommandProcessorResponse resp = driver.run(cmd); + assertEquals(0, resp.getResponseCode()); + } + + private static List<String> getSortedList(String... strings) { + return getSortedList(Arrays.asList(strings)); + } + + private static List<String> getSortedList(List<String> columns) { + List<String> sortedCols = new ArrayList<String>(columns); + Collections.sort(sortedCols); + return sortedCols; + } + +} diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java b/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java index b723484..f0cab26 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -37,7 +38,7 @@ public DefaultMetaStoreFilterHookImpl(HiveConf conf) { } @Override - public List<String> filterDatabases(List<String> dbList) { + public List<String> filterDatabases(List<String> dbList) throws MetaException { return dbList; } @@ -47,7 +48,7 @@ public Database filterDatabase(Database dataBase) throws NoSuchObjectException { } @Override - public List<String> filterTableNames(String dbName, List<String> tableList) { + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException { return tableList; } @@ -57,18 +58,18 @@ public Table filterTable(Table table) throws NoSuchObjectException { } @Override - public List<Table> filterTables(List<Table> tableList) { + public List<Table> filterTables(List<Table> tableList) throws MetaException { return tableList; } @Override - public List<Partition> filterPartitions(List<Partition> partitionList) { + public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException { return partitionList; } @Override public List<PartitionSpec> filterPartitionSpecs( - List<PartitionSpec> partitionSpecList) { + List<PartitionSpec> partitionSpecList) throws MetaException { return partitionSpecList; } @@ -79,7 +80,7 @@ public Partition filterPartition(Partition partition) throws NoSuchObjectExcept { @Override public List<String> filterPartitionNames(String dbName, String tblName, - List<String> partitionNames) { + List<String> partitionNames) throws MetaException { return partitionNames; } @@ -90,12 +91,12 @@ public Index filterIndex(Index index) throws NoSuchObjectException { @Override public List<String> filterIndexNames(String dbName, String tblName, - List<String> indexList) { + List<String> indexList) throws MetaException { return indexList; } @Override - public List<Index> filterIndexes(List<Index> indexeList) { + public List<Index> filterIndexes(List<Index> indexeList) throws MetaException { return indexeList; } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java index 51f63ad..933ae2d 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -43,7 +44,7 @@ * @param dbList * @return List of filtered Db names */ - public List<String> filterDatabases(List<String> dbList); + public List<String> filterDatabases(List<String> dbList) throws MetaException; /** * filter to given database object if applicable @@ -51,7 +52,7 @@ * @return the same database if it's not filtered out * @throws NoSuchObjectException */ - public Database filterDatabase(Database dataBase) throws NoSuchObjectException; + public Database filterDatabase(Database dataBase) throws MetaException, NoSuchObjectException; /** * Filter given list of tables @@ -59,7 +60,7 @@ * @param tableList * @return List of filtered table names */ - public List<String> filterTableNames(String dbName, List<String> tableList); + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException; /** * filter to given table object if applicable @@ -67,7 +68,7 @@ * @return the same table if it's not filtered out * @throws NoSuchObjectException */ - public Table filterTable(Table table) throws NoSuchObjectException; + public Table filterTable(Table table) throws MetaException, NoSuchObjectException; /** * Filter given list of tables @@ -75,21 +76,22 @@ * @param tableList * @return List of filtered table names */ - public List<Table>
filterTables(List<Table> tableList); + public List<Table> filterTables(List<Table> tableList) throws MetaException; /** * Filter given list of partitions * @param partitionList * @return */ - public List<Partition> filterPartitions(List<Partition> partitionList); + public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException; /** * Filter given list of partition specs * @param partitionSpecList * @return */ - public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList); + public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList) + throws MetaException; /** * filter to given partition object if applicable @@ -97,7 +99,7 @@ * @return the same partition object if it's not filtered out * @throws NoSuchObjectException */ - public Partition filterPartition(Partition partition) throws NoSuchObjectException; + public Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException; /** * Filter given list of partition names @@ -107,9 +109,9 @@ * @return */ public List<String> filterPartitionNames(String dbName, String tblName, - List<String> partitionNames); + List<String> partitionNames) throws MetaException; - public Index filterIndex(Index index) throws NoSuchObjectException; + public Index filterIndex(Index index) throws MetaException, NoSuchObjectException; /** * Filter given list of index names @@ -119,13 +121,13 @@ * @return */ public List<String> filterIndexNames(String dbName, String tblName, - List<String> indexList); + List<String> indexList) throws MetaException; /** * Filter given list of index objects * @param indexeList * @return */ - public List<Index> filterIndexes(List<Index> indexeList); + public List<Index> filterIndexes(List<Index> indexeList) throws MetaException; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java new file mode 100644 index 0000000..0989e20 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java @@ -0,0 +1,108 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Metastore filter hook for filtering out the list of objects that the current authorization + * implementation does not allow the user to see + */ +@Private +public class AuthorizationMetaStoreFilterHook extends DefaultMetaStoreFilterHookImpl { + + public static final Log LOG = LogFactory.getLog(AuthorizationMetaStoreFilterHook.class); + + public AuthorizationMetaStoreFilterHook(HiveConf conf) { + super(conf); + } + + @Override + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException { + List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbName, tableList); + return getTableNames(getFilteredObjects(listObjs)); + } + + @Override + public List<String> filterDatabases(List<String> dbList) throws MetaException { + List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbList); + return getDbNames(getFilteredObjects(listObjs)); + } + + private List<HivePrivilegeObject> getHivePrivObjects(List<String> dbList) { + List<HivePrivilegeObject> objs = new ArrayList<HivePrivilegeObject>(); + for(String dbname : dbList) { + objs.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbname, dbname)); + } + return objs; + } + + private List<String> getDbNames(List<HivePrivilegeObject> filteredObjects) { + List<String> tnames = new ArrayList<String>(); + for(HivePrivilegeObject obj : filteredObjects) { + tnames.add(obj.getDbname()); + } + return tnames; + } + + private List<String> getTableNames(List<HivePrivilegeObject> filteredObjects) { + List<String> tnames = new ArrayList<String>(); + for(HivePrivilegeObject obj : filteredObjects) { + tnames.add(obj.getObjectName()); + } + return tnames; + } + + private List<HivePrivilegeObject> getFilteredObjects(List<HivePrivilegeObject> listObjs) throws MetaException { + SessionState ss = SessionState.get(); + HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder(); + authzContextBuilder.setUserIpAddress(ss.getUserIpAddress()); + try { + return ss.getAuthorizerV2().filterListCmdObjects(listObjs, authzContextBuilder.build()); + } catch (HiveAuthzPluginException e) { + LOG.error(e); + throw new MetaException(e.getMessage()); + } catch (HiveAccessControlException e) { + // authorization error is not really expected in a filter call + // the impl should have just filtered out everything.
A checkPrivileges call + * would have already been made to authorize this action + LOG.error(e); + throw new MetaException(e.getMessage()); + } + } + + private List<HivePrivilegeObject> getHivePrivObjects(String dbName, List<String> tableList) { + List<HivePrivilegeObject> objs = new ArrayList<HivePrivilegeObject>(); + for(String tname : tableList) { + objs.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tname)); + } + return objs; + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java index d877686..adae8ea 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java @@ -24,8 +24,7 @@ /** * Exception thrown by the Authorization plugin api (v2). Indicates - * an error while performing authorization, and not a authorization being - * denied. + * an authorization check denying permissions for an action. */ @LimitedPrivate(value = { "Apache Argus (incubating)" }) @Evolving diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java index 5a5b3d5..59aabe4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java @@ -29,17 +29,15 @@ public interface HiveAuthorizationValidator { /** - * Check if current user has privileges to perform given operation type - * hiveOpType on the given input and output objects - * - * @param hiveOpType - * @param inputHObjs - * @param outputHObjs - * @param context - * @throws HiveAuthzPluginException - * @throws HiveAccessControlException + * see HiveAuthorizer.checkPrivileges */ void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException; + /** + * see HiveAuthorizer.filterListCmdObjects + */ + List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context); + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java index 1f1eba2..97d9aa9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java @@ -154,6 +154,21 @@ void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inp List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException; + + /** + * Filter out any objects that should not be shown to the user, from the list of + * tables or databases coming from a 'show tables' or 'show databases' command + * @param listObjs List of all objects obtained as result of a show command + * @param context + * @return filtered list of objects that will be returned to the user invoking the command + * @throws HiveAuthzPluginException + * @throws HiveAccessControlException + */ + List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) + throws HiveAuthzPluginException, HiveAccessControlException; + + /** + * @return all existing roles + * @throws
HiveAuthzPluginException diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java index e615049..c555fbf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java @@ -85,6 +85,13 @@ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> + + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { + return authValidator.filterListCmdObjects(listObjs, context); + } + @Override public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException { return accessController.getAllRoles(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java index ac1cc47..86de47c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java @@ -21,7 +21,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Set; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -372,4 +371,11 @@ public void setCurrentRole(String roleName) throws HiveAccessControlException, H @Override public void applyAuthorizationConfigPolicy(HiveConf hiveConf) { } + + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { + // do no filtering in old authorizer + return listObjs; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java index cabc22a..b832fc8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java @@ -42,4 +42,10 @@ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> + + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) { + return listObjs; + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java index 0e093b0..7267756 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java @@ -149,4 +149,10 @@ private void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> + + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) { + return listObjs; + } + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index d81b44c..14bc46a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++
b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -26,7 +26,15 @@ import java.net.URI; import java.net.URLClassLoader; import java.sql.Timestamp; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; @@ -41,6 +49,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.MapRedStats; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.spark.session.SparkSession; @@ -672,7 +681,7 @@ private void setupAuth() { clsStr, authenticator, true); if (authorizer == null) { - // if it was null, the new authorization plugin must be specified in + // if it was null, the new (V2) authorization plugin must be specified in // config HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); @@ -684,13 +693,17 @@ private void setupAuth() { authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), conf, authenticator, authzContextBuilder.build()); + setAuthorizerV2Config(); - authorizerV2.applyAuthorizationConfigPolicy(conf); } // create the create table grants with new config createTableGrants = CreateTableAutomaticGrant.create(conf); } catch (HiveException e) { + LOG.error("Error setting up authorization: " + e.getMessage(), e); + throw new RuntimeException(e); + } catch (MetaException e) { + LOG.error("Error setting up authorization: " + e.getMessage(), e); throw new RuntimeException(e); } @@ -701,6 +714,22 @@ private void setupAuth() { return; } + private void setAuthorizerV2Config() throws MetaException, HiveException { + // avoid processing the same config multiple times, check marker + if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { + return; + } + conf.setVar(ConfVars.METASTORE_FILTER_HOOK, + "org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook"); + + authorizerV2.applyAuthorizationConfigPolicy(conf); + // update config in Hive thread local as well and init the metastore client + Hive.get(conf).getMSC(); + + // set a marker that this conf has been processed. + conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); + } + public Object getActiveAuthorizer() { return getAuthorizationMode() == AuthorizationMode.V1 ? getAuthorizer() : getAuthorizerV2(); @@ -1359,22 +1388,10 @@ public String getUserName() { /** * If authorization mode is v2, then pass it through authorizer so that it can apply * any security configuration changes. + * @throws MetaException */ - public void applyAuthorizationPolicy() throws HiveException { - if(!isAuthorizationModeV2()){ - // auth v1 interface does not have this functionality - return; - } - - // avoid processing the same config multiple times, check marker - if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { - return; - } - - authorizerV2.applyAuthorizationConfigPolicy(conf); - // set a marker that this conf has been processed. 
- conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); - + public void applyAuthorizationPolicy() throws HiveException, MetaException { + setupAuth(); } public Map<String, Map<String, Table>> getTempTables() { diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java index 883bf9b..9fed8ae 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIService.java +++ b/service/src/java/org/apache/hive/service/cli/CLIService.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -79,8 +80,9 @@ public CLIService(HiveServer2 hiveServer2) { public synchronized void init(HiveConf hiveConf) { try { applyAuthorizationConfigPolicy(hiveConf); - } catch (HiveException e) { - throw new RuntimeException("Error applying authorization policy on hive configuration", e); + } catch (Exception e) { + throw new RuntimeException("Error applying authorization policy on hive configuration: " + + e.getMessage(), e); } this.hiveConf = hiveConf; sessionManager = new SessionManager(hiveServer2); @@ -115,7 +117,8 @@ public synchronized void init(HiveConf hiveConf) { super.init(hiveConf); } - private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException { + private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, + MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf);
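For context on how these pieces fit together: once hive.security.authorization.manager names a HiveAuthorizerFactory, setAuthorizerV2Config() above forces hive.metastore.filter.hook to AuthorizationMetaStoreFilterHook, so the result lists of 'show tables' and 'show databases' are routed through HiveAuthorizer.filterListCmdObjects. A minimal sketch of how a plugin might implement the new callback follows; the class name, package, and the "tmp_" hidden-prefix policy are hypothetical, for illustration only, and are not part of this patch:

  package org.example.hive.authz; // hypothetical package

  import java.util.ArrayList;
  import java.util.List;

  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

  // Declared abstract so the sketch can show only the new callback; a real
  // authorizer would implement the rest of the HiveAuthorizer interface too.
  public abstract class PrefixHidingAuthorizer implements HiveAuthorizer {

    @Override
    public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
        HiveAuthzContext context) {
      List<HivePrivilegeObject> filtered = new ArrayList<HivePrivilegeObject>();
      for (HivePrivilegeObject obj : listObjs) {
        // hide tables matching the made-up "tmp_" policy; databases and
        // any other object types pass through unchanged
        if (obj.getType() == HivePrivilegeObjectType.TABLE_OR_VIEW
            && obj.getObjectName().startsWith("tmp_")) {
          continue;
        }
        filtered.add(obj);
      }
      return filtered;
    }
  }

Returning the input list unchanged, as HiveV1Authorizer, DummyHiveAuthorizationValidator, and SQLStdHiveAuthorizationValidator do in this patch, keeps an authorizer's observable behavior identical to before the API change.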