From 15d492fb47a00fc08fc884cffcc405f5c7990d04 Mon Sep 17 00:00:00 2001 From: rmani Date: Fri, 6 Dec 2019 15:59:21 -0800 Subject: [PATCH 1/1] HIVE-22573:HMS filter metadata using Ranger plugin --- .../plugin/metastore/HiveMetaStoreAuthorizer.java | 542 ++++++++++++++++----- .../plugin/metastore/HiveMetaStoreAuthzInfo.java | 102 ++-- .../plugin/metastore/events/ReadDatabaseEvent.java | 88 ++++ .../plugin/metastore/events/ReadTableEvent.java | 77 +++ .../filtercontext/DatabaseFilterContext.java | 81 +++ .../filtercontext/TableFilterContext.java | 102 ++++ .../metastore/TestHiveMetaStoreAuthorizer.java | 43 ++ 7 files changed, 855 insertions(+), 180 deletions(-) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadTableEvent.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/DatabaseFilterContext.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/TableFilterContext.java diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java index 434d1c9..dcd310f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java @@ -18,10 +18,12 @@ */ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.MetaStoreFilterHook; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.MetaStorePreEventListener; import org.apache.hadoop.hive.metastore.TableType; @@ -34,6 +36,11 @@ import org.apache.hadoop.hive.metastore.events.PreDropTableEvent; import org.apache.hadoop.hive.metastore.events.PreEventContext; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PartitionSpec; +import org.apache.hadoop.hive.metastore.api.TableMeta; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider; @@ -45,78 +52,308 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.DatabaseFilterContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext; import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; /** * HiveMetaStoreAuthorizer : Do authorization checks on MetaStore Events in MetaStorePreEventListener */ -public class HiveMetaStoreAuthorizer extends MetaStorePreEventListener { - private static final Log LOG = LogFactory.getLog(HiveMetaStoreAuthorizer.class); +public class HiveMetaStoreAuthorizer extends MetaStorePreEventListener implements MetaStoreFilterHook { + 
private static final Log LOG = LogFactory.getLog(HiveMetaStoreAuthorizer.class); private static final ThreadLocal tConfig = new ThreadLocal() { - @Override - protected Configuration initialValue() { + @Override protected Configuration initialValue() { return new HiveConf(HiveMetaStoreAuthorizer.class); } }; - private static final ThreadLocal tAuthenticator = new ThreadLocal() { - @Override - protected HiveMetastoreAuthenticationProvider initialValue() { - try { - return (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER); - } catch (HiveException excp) { - throw new IllegalStateException("Authentication provider instantiation failure", excp); - } - } - }; + private static final ThreadLocal tAuthenticator = + new ThreadLocal() { + @Override protected HiveMetastoreAuthenticationProvider initialValue() { + try { + return (HiveMetastoreAuthenticationProvider) HiveUtils + .getAuthenticator(tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER); + } catch (HiveException excp) { + throw new IllegalStateException("Authentication provider instantiation failure", excp); + } + } + }; public HiveMetaStoreAuthorizer(Configuration config) { super(config); } - @Override - public final void onEvent(PreEventContext preEventContext) throws MetaException, NoSuchObjectException, InvalidOperationException { + @Override public final void onEvent(PreEventContext preEventContext) + throws MetaException, NoSuchObjectException, InvalidOperationException { if (LOG.isDebugEnabled()) { LOG.debug("==> HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); } - HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext); + try { + HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); + if (!skipAuthorization()) { + HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext); + checkPrivileges(authzContext, hiveAuthorizer); + } + } 
catch (Exception e) { + LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e); + throw new MetaException(e.getMessage()); + } - if (!skipAuthorization(authzContext)) { - try { - HiveConf hiveConf = new HiveConf(super.getConf(), HiveConf.class); - HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); + } + } + + @Override public final List filterDatabases(List list) throws MetaException { + if (LOG.isDebugEnabled()) { + LOG.debug("HiveMetaStoreAuthorizer.filterDatabases()"); + } - if (authorizerFactory != null) { - HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get(); + if (list == null) { + return Collections.emptyList(); + } - authenticator.setConf(hiveConf); + DatabaseFilterContext databaseFilterContext = new DatabaseFilterContext(list); + HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = databaseFilterContext.getAuthzContext(); + List filteredDatabases = filterDatabaseObjects(hiveMetaStoreAuthzInfo); + if (CollectionUtils.isEmpty(filteredDatabases)) { + filteredDatabases = Collections.emptyList(); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("HiveMetaStoreAuthorizer.filterDatabases() :" + filteredDatabases); + } + return filteredDatabases; + } - HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); + @Override public final Database filterDatabase(Database database) throws MetaException, NoSuchObjectException { + if (database != null) { + String dbName = database.getName(); + List databases = filterDatabases(Collections.singletonList(dbName)); + if (databases.isEmpty()) { + throw new NoSuchObjectException(String.format("Database %s does not exist", dbName)); + } + } + return database; + } - authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE); - 
authzContextBuilder.setSessionString("HiveMetaStore"); + @Override public final List filterTableNames(String s, String s1, List list) throws MetaException { + if (LOG.isDebugEnabled()) { + LOG.debug("==> HiveMetaStoreAuthorizer.filterTableNames()"); + } + List filteredTableNames = null; + if (list != null) { + String dbName = getDBName(s1); + TableFilterContext tableFilterContext = new TableFilterContext(dbName, list); + HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); + filteredTableNames = filterTableNames(hiveMetaStoreAuthzInfo, dbName, list); + if (CollectionUtils.isEmpty(filteredTableNames)) { + filteredTableNames = Collections.emptyList(); + } + } - HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build(); + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.filterTableNames() : " + filteredTableNames); + } - HiveAuthorizer hiveAuthorizer = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext); + return filteredTableNames; + } - checkPrivileges(authzContext, hiveAuthorizer); - } - } catch (Exception e) { - LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e); - throw new MetaException(e.getMessage()); + @Override public final Table filterTable(Table table) throws MetaException, NoSuchObjectException { + if (table != null) { + List tables = filterTables(Collections.singletonList(table)); + if (tables.isEmpty()) { + throw new NoSuchObjectException(String.format("Database %s does not exist", table.getTableName())); } } + return table; + } + @Override public final List
<Table> filterTables(List<Table>
list) throws MetaException { if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); + LOG.debug("==> HiveMetaStoreAuthorizer.filterTables()"); + } + + List
filteredTables = null; + + if (list != null) { + TableFilterContext tableFilterContext = new TableFilterContext(list); + HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); + filteredTables = filterTableObjects(hiveMetaStoreAuthzInfo, list); + if (CollectionUtils.isEmpty(filteredTables)) { + filteredTables = Collections.emptyList(); + } } + + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.filterTables(): " + filteredTables); + } + return filteredTables; + } + + @Override public final Catalog filterCatalog(Catalog catalog) throws MetaException { + return catalog; + } + + @Override public final List filterCatalogs(List catalogs) throws MetaException { + return catalogs; + } + + @Override public final List filterTableMetas(String catName, String dbName, List tableMetas) + throws MetaException { + return tableMetas; + } + + @Override public final List filterPartitions(List list) throws MetaException { + return list; + } + + @Override public final List filterPartitionSpecs(List list) throws MetaException { + return list; + } + + @Override public final Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException { + return partition; + } + + @Override public final List filterPartitionNames(String s, String s1, String s2, List list) + throws MetaException { + return list; + } + + private List filterDatabaseObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo) throws MetaException { + List ret = null; + + if (LOG.isDebugEnabled()) { + LOG.debug("==> HiveMetaStoreAuthorizer.filterDatabaseObjects()"); + } + + try { + HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); + List hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); + HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); + List filteredHivePrivilegeObjects = + hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); + if 
(CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { + ret = getFilterDatabaseList(filteredHivePrivilegeObjects); + } + } catch (Exception e) { + throw new MetaException("Error in HiveMetaStoreAuthorizer.filterDatabase()" + e.getMessage()); + } + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.filterDatabaseObjects() :" + ret); + } + return ret; + } + + private List
<Table> filterTableObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, List<Table>
tableList) + throws MetaException { + List<Table>
ret = null; + + try { + HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); + List hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); + HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); + List filteredHivePrivilegeObjects = + hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); + if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { + ret = getFilteredTableList(filteredHivePrivilegeObjects, tableList); + } + } catch (Exception e) { + throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); + } + return ret; + } + + private List getFilterDatabaseList(List hivePrivilegeObjects) { + List ret = new ArrayList<>(); + for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { + String dbName = hivePrivilegeObject.getDbname(); + ret.add(dbName); + } + return ret; + } + + private List
<Table> getFilteredTableList(List<HivePrivilegeObject> hivePrivilegeObjects, List<Table>
tableList) { + List<Table>
ret = new ArrayList<>(); + for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { + String dbName = hivePrivilegeObject.getDbname(); + String tblName = hivePrivilegeObject.getObjectName(); + Table table = getFilteredTable(dbName, tblName, tableList); + if (table != null) { + ret.add(table); + } + } + return ret; + } + + private Table getFilteredTable(String dbName, String tblName, List
tableList) { + Table ret = null; + for (Table table : tableList) { + String databaseName = table.getDbName(); + String tableName = table.getTableName(); + if (dbName.equals(databaseName) && tblName.equals(tableName)) { + ret = table; + break; + } + } + return ret; + } + + private List filterTableNames(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, String dbName, + List tableNames) throws MetaException { + List ret = null; + + try { + HiveAuthorizer hiveAuthorizer = createHiveMetaStoreAuthorizer(); + List hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); + HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); + List filteredHivePrivilegeObjects = + hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); + if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { + ret = getFilteredTableNames(filteredHivePrivilegeObjects, dbName, tableNames); + } + } catch (Exception e) { + throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); + } + return ret; + } + + private List getFilteredTableNames(List hivePrivilegeObjects, String databaseName, + List tableNames) { + List ret = new ArrayList<>(); + for (HivePrivilegeObject hivePrivilegeObject : hivePrivilegeObjects) { + String dbName = hivePrivilegeObject.getDbname(); + String tblName = hivePrivilegeObject.getObjectName(); + String table = getFilteredTableNames(dbName, tblName, databaseName, tableNames); + if (table != null) { + ret.add(table); + } + } + return ret; + } + + private String getFilteredTableNames(String dbName, String tblName, String databaseName, List tableNames) { + String ret = null; + for (String tableName : tableNames) { + if (dbName.equals(databaseName) && tblName.equals(tableName)) { + ret = tableName; + break; + } + } + return ret; + } + + private String getDBName(String str) { + return (str != null) ? 
str.substring(str.indexOf("#") + 1) : null; } HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws MetaException { @@ -129,117 +366,152 @@ HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws if (preEventContext != null) { switch (preEventContext.getEventType()) { - case CREATE_DATABASE: - authzEvent = new CreateDatabaseEvent(preEventContext); - break; - case ALTER_DATABASE: - authzEvent = new AlterDatabaseEvent(preEventContext); - break; - case DROP_DATABASE: - authzEvent = new DropDatabaseEvent(preEventContext); - break; - case CREATE_TABLE: - authzEvent = new CreateTableEvent(preEventContext); - if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { - throw new MetaException(getErrorMessage("CREATE_VIEW", getCurrentUser(authzEvent))); - } - break; - case ALTER_TABLE: - authzEvent = new AlterTableEvent(preEventContext); - if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { - throw new MetaException(getErrorMessage("ALTER_VIEW", getCurrentUser(authzEvent))); - } - break; - case DROP_TABLE: - authzEvent = new DropTableEvent(preEventContext); - if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { - throw new MetaException(getErrorMessage("DROP_VIEW", getCurrentUser(authzEvent))); - } - break; - case ADD_PARTITION: - authzEvent = new AddPartitionEvent(preEventContext); - break; - case ALTER_PARTITION: - authzEvent = new AlterPartitionEvent(preEventContext); - break; - case LOAD_PARTITION_DONE: - authzEvent = new LoadPartitionDoneEvent(preEventContext); - break; - case DROP_PARTITION: - authzEvent = new DropPartitionEvent(preEventContext); - break; - case AUTHORIZATION_API_CALL: - case READ_ISCHEMA: - case CREATE_ISCHEMA: - case DROP_ISCHEMA: - case ALTER_ISCHEMA: - case ADD_SCHEMA_VERSION: - case ALTER_SCHEMA_VERSION: - case DROP_SCHEMA_VERSION: - case READ_SCHEMA_VERSION: - case CREATE_CATALOG: - case 
ALTER_CATALOG: - case DROP_CATALOG: - if (!isSuperUser(getCurrentUser())) { - throw new MetaException(getErrorMessage(preEventContext, getCurrentUser())); - } - break; - default: - break; - } + case CREATE_DATABASE: + authzEvent = new CreateDatabaseEvent(preEventContext); + break; + case ALTER_DATABASE: + authzEvent = new AlterDatabaseEvent(preEventContext); + break; + case DROP_DATABASE: + authzEvent = new DropDatabaseEvent(preEventContext); + break; + case CREATE_TABLE: + authzEvent = new CreateTableEvent(preEventContext); + if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { + throw new MetaException(getErrorMessage("CREATE_VIEW", getCurrentUser(authzEvent))); + } + break; + case ALTER_TABLE: + authzEvent = new AlterTableEvent(preEventContext); + if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { + throw new MetaException(getErrorMessage("ALTER_VIEW", getCurrentUser(authzEvent))); + } + break; + case DROP_TABLE: + authzEvent = new DropTableEvent(preEventContext); + if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) { + throw new MetaException(getErrorMessage("DROP_VIEW", getCurrentUser(authzEvent))); + } + break; + case ADD_PARTITION: + authzEvent = new AddPartitionEvent(preEventContext); + break; + case ALTER_PARTITION: + authzEvent = new AlterPartitionEvent(preEventContext); + break; + case LOAD_PARTITION_DONE: + authzEvent = new LoadPartitionDoneEvent(preEventContext); + break; + case DROP_PARTITION: + authzEvent = new DropPartitionEvent(preEventContext); + break; + case READ_TABLE: + authzEvent = new ReadTableEvent(preEventContext); + break; + case READ_DATABASE: + authzEvent = new ReadDatabaseEvent(preEventContext); + break; + case AUTHORIZATION_API_CALL: + case READ_ISCHEMA: + case CREATE_ISCHEMA: + case DROP_ISCHEMA: + case ALTER_ISCHEMA: + case ADD_SCHEMA_VERSION: + case ALTER_SCHEMA_VERSION: + case DROP_SCHEMA_VERSION: + case READ_SCHEMA_VERSION: + case 
CREATE_CATALOG: + case ALTER_CATALOG: + case DROP_CATALOG: + if (!isSuperUser(getCurrentUser())) { + throw new MetaException(getErrorMessage(preEventContext, getCurrentUser())); + } + break; + default: + break; + } } HiveMetaStoreAuthzInfo ret = authzEvent != null ? authzEvent.getAuthzContext() : null; if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType() + "; ret=" + ret); + LOG.debug( + "<== HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType() + "; ret=" + + ret); + } + + return ret; + } + + HiveAuthorizer createHiveMetaStoreAuthorizer() throws Exception { + HiveAuthorizer ret = null; + HiveConf hiveConf = new HiveConf(super.getConf(), HiveConf.class); + HiveAuthorizerFactory authorizerFactory = + HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + + if (authorizerFactory != null) { + HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get(); + + authenticator.setConf(hiveConf); + + HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); + + authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE); + authzContextBuilder.setSessionString("HiveMetaStore"); + + HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build(); + + ret = authorizerFactory + .createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext); } return ret; } boolean isSuperUser(String userName) { - Configuration conf = getConf(); - String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); + Configuration conf = getConf(); + String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); return (MetaStoreServerUtils.checkUserHasHostProxyPrivileges(userName, conf, ipAddress)); } boolean isViewOperation(PreEventContext preEventContext) { boolean ret = false; - PreEventContext.PreEventType preEventType = 
preEventContext.getEventType(); + PreEventContext.PreEventType preEventType = preEventContext.getEventType(); switch (preEventType) { - case CREATE_TABLE: - PreCreateTableEvent preCreateTableEvent = (PreCreateTableEvent) preEventContext; - Table table = preCreateTableEvent.getTable(); - ret = isViewType(table); - break; - case ALTER_TABLE: - PreAlterTableEvent preAlterTableEvent = (PreAlterTableEvent) preEventContext; - Table inTable = preAlterTableEvent.getOldTable(); - Table outTable = preAlterTableEvent.getNewTable(); - ret = (isViewType(inTable) || isViewType(outTable)); - break; - case DROP_TABLE: - PreDropTableEvent preDropTableEvent = (PreDropTableEvent) preEventContext; - Table droppedTable = preDropTableEvent.getTable(); - ret = isViewType(droppedTable); - break; + case CREATE_TABLE: + PreCreateTableEvent preCreateTableEvent = (PreCreateTableEvent) preEventContext; + Table table = preCreateTableEvent.getTable(); + ret = isViewType(table); + break; + case ALTER_TABLE: + PreAlterTableEvent preAlterTableEvent = (PreAlterTableEvent) preEventContext; + Table inTable = preAlterTableEvent.getOldTable(); + Table outTable = preAlterTableEvent.getNewTable(); + ret = (isViewType(inTable) || isViewType(outTable)); + break; + case DROP_TABLE: + PreDropTableEvent preDropTableEvent = (PreDropTableEvent) preEventContext; + Table droppedTable = preDropTableEvent.getTable(); + ret = isViewType(droppedTable); + break; } return ret; } - private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuthorizer authorizer) throws MetaException { + private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuthorizer authorizer) + throws MetaException { if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); + LOG.debug( + "==> HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); } - HiveOperationType 
hiveOpType = authzContext.getOperationType(); - List inputHObjs = authzContext.getInputHObjs(); - List outputHObjs = authzContext.getOutputHObjs(); - HiveAuthzContext hiveAuthzContext = authzContext.getHiveAuthzContext(); + HiveOperationType hiveOpType = authzContext.getOperationType(); + List inputHObjs = authzContext.getInputHObjs(); + List outputHObjs = authzContext.getOutputHObjs(); + HiveAuthzContext hiveAuthzContext = authzContext.getHiveAuthzContext(); try { authorizer.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, hiveAuthzContext); @@ -248,38 +520,34 @@ private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuth } if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); + LOG.debug( + "<== HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer); } } - private boolean skipAuthorization(HiveMetaStoreAuthzInfo authzContext) { + private boolean skipAuthorization() { if (LOG.isDebugEnabled()) { - LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization(): authzContext=" + authzContext); + LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization()"); } boolean ret = false; - if (authzContext == null) { + UserGroupInformation ugi = getUGI(); + + if (ugi == null) { ret = true; } else { - - UserGroupInformation ugi = authzContext.getUGI(); - - if (ugi == null) { - ret = true; - } else { - ret = isSuperUser(ugi.getShortUserName()); - } + ret = isSuperUser(ugi.getShortUserName()); } if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): authzContext=" + authzContext + "; ret=" + ret); + LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): " + ret); } return ret; } - private boolean isViewType(Table table) { + private boolean isViewType(Table table) { boolean ret = false; String tableType = table.getTableType(); @@ -312,5 +580,13 @@ private String 
getCurrentUser() { private String getCurrentUser(HiveMetaStoreAuthorizableEvent authorizableEvent) { return authorizableEvent.getAuthzContext().getUGI().getShortUserName(); } + + private UserGroupInformation getUGI() { + try { + return UserGroupInformation.getCurrentUser(); + } catch (IOException excp) { + } + return null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java index a372c78..985826b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; +import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.events.PreEventContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; @@ -38,70 +39,77 @@ private final HiveOperationType operationType; private final List inputHObjs; private final List outputHObjs; - private final String commandString; + private final String commandString; private final HiveAuthzContext hiveAuthzContext; - public HiveMetaStoreAuthzInfo(PreEventContext preEventContext, HiveOperationType operationType, List inputHObjs, List outputHObjs, String commandString) { - this.preEventContext = preEventContext; - this.operationType = operationType; - this.inputHObjs = inputHObjs; - this.outputHObjs = outputHObjs; - this.commandString = commandString; + public HiveMetaStoreAuthzInfo(PreEventContext preEventContext, HiveOperationType operationType, + List inputHObjs, List outputHObjs, String commandString) { + this.preEventContext = preEventContext; + this.operationType = operationType; + 
this.inputHObjs = inputHObjs; + this.outputHObjs = outputHObjs; + this.commandString = commandString; this.hiveAuthzContext = createHiveAuthzContext(); } - public HiveOperationType getOperationType() { - return operationType; - } + public HiveOperationType getOperationType() { + return operationType; + } - public List getInputHObjs() { return inputHObjs; } + public List getInputHObjs() { + return inputHObjs; + } - public List getOutputHObjs() { return outputHObjs; } + public List getOutputHObjs() { + return outputHObjs; + } - public String getCommandString() { - return commandString; - } + public String getCommandString() { + return commandString; + } - public HiveAuthzContext getHiveAuthzContext() { return hiveAuthzContext; } + public HiveAuthzContext getHiveAuthzContext() { + return hiveAuthzContext; + } - public PreEventContext getPreEventContext(){ - return preEventContext; - } + public PreEventContext getPreEventContext() { + return preEventContext; + } - public UserGroupInformation getUGI() { - try { - return UserGroupInformation.getCurrentUser(); - } catch (IOException excp) { - } - return null; + public UserGroupInformation getUGI() { + try { + return UserGroupInformation.getCurrentUser(); + } catch (IOException excp) { } + return null; + } - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("HiveMetaStoreAuthzInfo= ").append("{"); - sb.append("eventType=").append(preEventContext.getEventType().name()); - sb.append(", operationType=").append(operationType.name()); - sb.append(", commandString=" ).append(commandString); - sb.append(", inputHObjs=").append(inputHObjs); - sb.append(", outputHObjs=").append(outputHObjs); - sb.append(" }"); - return sb.toString(); - } + @Override public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("HiveMetaStoreAuthzInfo= ").append("{"); + String eventType = (preEventContext != null) ? 
preEventContext.getEventType().name() : StringUtils.EMPTY; + sb.append("eventType=").append(eventType); + sb.append(", operationType=").append(operationType.name()); + sb.append(", commandString=").append(commandString); + sb.append(", inputHObjs=").append(inputHObjs); + sb.append(", outputHObjs=").append(outputHObjs); + sb.append(" }"); + return sb.toString(); + } - private HiveAuthzContext createHiveAuthzContext() { - HiveAuthzContext.Builder builder = new HiveAuthzContext.Builder(); - builder.setCommandString(commandString); + private HiveAuthzContext createHiveAuthzContext() { + HiveAuthzContext.Builder builder = new HiveAuthzContext.Builder(); + builder.setCommandString(commandString); - // TODO: refer to SessionManager/HiveSessionImpl for details on getting ipAddress and forwardedAddresses - builder.setForwardedAddresses(new ArrayList<>()); + // TODO: refer to SessionManager/HiveSessionImpl for details on getting ipAddress and forwardedAddresses + builder.setForwardedAddresses(new ArrayList<>()); - String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); + String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); - builder.setUserIpAddress(ipAddress); + builder.setUserIpAddress(ipAddress); - HiveAuthzContext ret = builder.build(); + HiveAuthzContext ret = builder.build(); - return ret; - } + return ret; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java new file mode 100644 index 0000000..56c5c1b --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.events.PreEventContext;
import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Authorizable event for metastore database reads ("use"/"show databases or tables").
 * Translates a {@link PreReadDatabaseEvent} into the privilege objects consumed by
 * HiveMetaStoreAuthorizer.
 */
public class ReadDatabaseEvent extends HiveMetaStoreAuthorizableEvent {
  private static final Log LOG = LogFactory.getLog(ReadDatabaseEvent.class);

  // Base command text; the database name from the event is appended when available.
  // Kept as a local computation instead of a mutable field so the result no longer
  // depends on argument-evaluation order inside getAuthzContext().
  private static final String COMMAND_STR_BASE = "use/show databases or tables";

  public ReadDatabaseEvent(PreEventContext preEventContext) {
    super(preEventContext);
  }

  /**
   * Builds the authorization context for this read: the database is the only input,
   * there are no outputs, and the command string carries the database name.
   */
  @Override
  public HiveMetaStoreAuthzInfo getAuthzContext() {
    List<HivePrivilegeObject> inputHObjs = getInputHObjs();
    Database database = ((PreReadDatabaseEvent) preEventContext).getDatabase();
    String commandStr = buildCommandString(COMMAND_STR_BASE, database);

    return new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, inputHObjs, getOutputHObjs(),
        commandStr);
  }

  /**
   * @return the database being read as the single input privilege object, or an
   *         empty list when the event carries no database.
   */
  private List<HivePrivilegeObject> getInputHObjs() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("==> ReadDatabaseEvent.getInputHObjs()");
    }

    List<HivePrivilegeObject> ret = new ArrayList<>();
    PreReadDatabaseEvent preReadDatabaseEvent = (PreReadDatabaseEvent) preEventContext;
    Database database = preReadDatabaseEvent.getDatabase();
    if (database != null) {
      ret.add(getHivePrivilegeObject(database));
    }

    // Log the result unconditionally (the original only logged when database != null).
    if (LOG.isDebugEnabled()) {
      LOG.debug("<== ReadDatabaseEvent.getInputHObjs(): ret=" + ret);
    }

    return ret;
  }

  /** Reading a database produces no output privilege objects. */
  private List<HivePrivilegeObject> getOutputHObjs() {
    return Collections.emptyList();
  }

  /** Appends the database name (when non-empty) to the base command string. */
  private String buildCommandString(String cmdStr, Database db) {
    String ret = cmdStr;

    if (db != null) {
      String dbName = db.getName();
      ret = ret + (StringUtils.isNotEmpty(dbName) ? " " + dbName : "");
    }

    return ret;
  }
}
package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.events.PreEventContext;
import org.apache.hadoop.hive.metastore.events.PreReadTableEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Authorizable event for metastore table reads. Translates a {@link PreReadTableEvent}
 * into the privilege objects consumed by HiveMetaStoreAuthorizer: both the enclosing
 * database and the table itself are inputs.
 */
public class ReadTableEvent extends HiveMetaStoreAuthorizableEvent {
  private static final Log LOG = LogFactory.getLog(ReadTableEvent.class);
  private static final String COMMAND_STR = "select";

  public ReadTableEvent(PreEventContext preEventContext) {
    super(preEventContext);
  }

  @Override
  public HiveMetaStoreAuthzInfo getAuthzContext() {
    return new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(),
        COMMAND_STR);
  }

  /**
   * @return input privilege objects: the table's database first, then the table.
   */
  private List<HivePrivilegeObject> getInputHObjs() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("==> ReadTableEvent.getInputHObjs()");
    }

    List<HivePrivilegeObject> ret = new ArrayList<>();
    PreReadTableEvent preReadTableEvent = (PreReadTableEvent) preEventContext;
    Table table = preReadTableEvent.getTable();
    String dbName = table.getDbName();

    // Database-level privilege object, attributed to the table's owner.
    ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null, null, null,
        HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, null, table.getOwner(), table.getOwnerType()));
    ret.add(getHivePrivilegeObject(table));

    if (LOG.isDebugEnabled()) {
      LOG.debug("<== ReadTableEvent.getInputHObjs(): ret=" + ret);
    }

    return ret;
  }

  /** Reading a table produces no output privilege objects. */
  private List<HivePrivilegeObject> getOutputHObjs() {
    return Collections.emptyList();
  }
}
package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Filter context used by HiveMetaStoreAuthorizer when filtering database listings:
 * wraps a list of database names as input privilege objects for an authorization
 * check. There is no metastore pre-event for filtering, so the event context is null.
 */
public class DatabaseFilterContext extends HiveMetaStoreAuthorizableEvent {

  private static final Log LOG = LogFactory.getLog(DatabaseFilterContext.class);

  private final List<String> databases;

  public DatabaseFilterContext(List<String> databases) {
    super(null);
    this.databases = databases;
    // The original constructor also invoked getAuthzContext() and discarded the
    // result (a no-op apart from debug logging, and an overridable call from a
    // constructor); that call has been removed.
  }

  @Override
  public HiveMetaStoreAuthzInfo getAuthzContext() {
    return new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(),
        null);
  }

  /**
   * @return one DATABASE privilege object per database name to be filtered.
   */
  private List<HivePrivilegeObject> getInputHObjs() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("==> DatabaseFilterContext.getInputHObjs()");
    }

    List<HivePrivilegeObject> ret = new ArrayList<>();
    for (String database : databases) {
      HivePrivilegeObjectType type = HivePrivilegeObjectType.DATABASE;
      HivePrivObjectActionType objectActionType = HivePrivObjectActionType.OTHER;
      HivePrivilegeObject hivePrivilegeObject =
          new HivePrivilegeObject(type, database, null, null, null, objectActionType, null, null);
      ret.add(hivePrivilegeObject);
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("<== DatabaseFilterContext.getInputHObjs(): ret=" + ret);
    }

    return ret;
  }

  /** Filtering has no output privilege objects. */
  private List<HivePrivilegeObject> getOutputHObjs() {
    return Collections.emptyList();
  }

  public List<String> getDatabases() {
    return databases;
  }
}
package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Filter context used by HiveMetaStoreAuthorizer when filtering table listings:
 * wraps either full {@link Table} objects or (dbName, tableName) pairs as input
 * privilege objects for an authorization check. There is no metastore pre-event
 * for filtering, so the event context is null.
 */
public class TableFilterContext extends HiveMetaStoreAuthorizableEvent {
  // Fixed: the original logger was created with DatabaseFilterContext.class.
  private static final Log LOG = LogFactory.getLog(TableFilterContext.class);

  // Exactly one of (tables) or (dbName + tableNames) is set, depending on which
  // constructor was used.
  private final List<Table> tables;
  private final List<String> tableNames;
  private final String dbName;

  public TableFilterContext(List<Table> tables) {
    super(null);
    this.tables = tables;
    this.tableNames = null;
    this.dbName = null;
    // The original constructor also invoked getAuthzContext() and discarded the
    // result (a no-op apart from debug logging, and absent from the other
    // constructor); that call has been removed for consistency.
  }

  public TableFilterContext(String dbName, List<String> tableNames) {
    super(null);
    this.tables = null;
    this.dbName = dbName;
    this.tableNames = tableNames;
  }

  @Override
  public HiveMetaStoreAuthzInfo getAuthzContext() {
    return new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(),
        null);
  }

  /**
   * @return one TABLE_OR_VIEW privilege object per table to be filtered, built
   *         from Table objects when available, otherwise from dbName/tableName pairs.
   */
  private List<HivePrivilegeObject> getInputHObjs() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("==> TableFilterContext.getInputHObjs()");
    }

    List<HivePrivilegeObject> ret = new ArrayList<>();

    if (tables != null) {
      for (Table table : tables) {
        HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW;
        HivePrivObjectActionType objectActionType = HivePrivObjectActionType.OTHER;
        HivePrivilegeObject hivePrivilegeObject =
            new HivePrivilegeObject(type, table.getDbName(), table.getTableName(), null, null, objectActionType, null,
                null);
        ret.add(hivePrivilegeObject);
      }
    } else {
      for (String tableName : tableNames) {
        HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW;
        HivePrivObjectActionType objectActionType = HivePrivObjectActionType.OTHER;
        HivePrivilegeObject hivePrivilegeObject =
            new HivePrivilegeObject(type, dbName, tableName, null, null, objectActionType, null, null);
        ret.add(hivePrivilegeObject);
      }
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("<== TableFilterContext.getInputHObjs(): ret=" + ret);
    }

    return ret;
  }

  /** Filtering has no output privilege objects. */
  private List<HivePrivilegeObject> getOutputHObjs() {
    return Collections.emptyList();
  }

  public List<Table> getTables() {
    return tables;
  }
}
    }
  }

  // Show-tables issued by an unauthorized user. With metadata filtering in place
  // the listing is expected to be filtered rather than rejected, so an exception
  // is not required here — TODO confirm intended semantics against the authorizer.
  @Test
  public void testQ__ShowTables_unAuthorizedUser() throws Exception {
    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser));
    try {
      hmsHandler.get_all_tables("default");
    } catch (Exception e) {
      // NOTE(review): `assert (true)` can never fail, so this catch block performs
      // no real verification; consider a genuine assertion on the error message.
      String err = e.getMessage();
      if (StringUtils.isNotEmpty(err)) {
        assert (true);
      }
    }
  }
}