From fc17715c04606f5a80ffc5b61d374af3b68049db Mon Sep 17 00:00:00 2001 From: rmani Date: Wed, 4 Dec 2019 18:12:51 -0800 Subject: [PATCH 1/1] HIVE-22573:HMS filter metadata using Ranger plugin --- .../plugin/metastore/HiveMetaStoreAuthorizer.java | 350 +++++++++++++++++++-- .../plugin/metastore/HiveMetaStoreAuthzInfo.java | 4 +- .../plugin/metastore/events/ReadDatabaseEvent.java | 85 +++++ .../plugin/metastore/events/ReadTableEvent.java | 73 +++++ .../filtercontext/DatabaseFilterContext.java | 78 +++++ .../filtercontext/TableFilterContext.java | 98 ++++++ .../metastore/TestHiveMetaStoreAuthorizer.java | 47 +++ 7 files changed, 701 insertions(+), 34 deletions(-) create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadTableEvent.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/DatabaseFilterContext.java create mode 100644 ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/TableFilterContext.java diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java index 434d1c9..8a670fe 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java @@ -18,10 +18,12 @@ */ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.MetaStoreFilterHook; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.MetaStorePreEventListener; import org.apache.hadoop.hive.metastore.TableType; @@ -34,6 +36,11 @@ import org.apache.hadoop.hive.metastore.events.PreDropTableEvent; import org.apache.hadoop.hive.metastore.events.PreEventContext; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; +import org.apache.hadoop.hive.metastore.api.Catalog; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PartitionSpec; +import org.apache.hadoop.hive.metastore.api.TableMeta; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider; @@ -45,18 +52,24 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.DatabaseFilterContext; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext; import org.apache.hadoop.security.UserGroupInformation; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; /** * HiveMetaStoreAuthorizer : Do authorization checks on MetaStore Events in MetaStorePreEventListener */ -public class HiveMetaStoreAuthorizer extends MetaStorePreEventListener { +public class HiveMetaStoreAuthorizer extends MetaStorePreEventListener implements MetaStoreFilterHook { private static final Log LOG = LogFactory.getLog(HiveMetaStoreAuthorizer.class); + 
private static HiveAuthorizer hiveAuthorizer = null; + private static final ThreadLocal tConfig = new ThreadLocal() { @Override protected Configuration initialValue() { @@ -85,38 +98,277 @@ public final void onEvent(PreEventContext preEventContext) throws MetaException, LOG.debug("==> HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); } - HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext); + try { + hiveAuthorizer = createHiveMetaStoreAuthorizer(); + if (!skipAuthorization()) { + HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext); + checkPrivileges(authzContext, hiveAuthorizer); + } + } catch (Exception e) { + LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e); + throw new MetaException(e.getMessage()); + } - if (!skipAuthorization(authzContext)) { - try { - HiveConf hiveConf = new HiveConf(super.getConf(), HiveConf.class); - HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); + } + } - if (authorizerFactory != null) { - HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get(); + @Override + public final List filterDatabases(List list) throws MetaException { + if (LOG.isDebugEnabled()) { + LOG.debug("HiveMetaStoreAuthorizer.filterDatabases()"); + } - authenticator.setConf(hiveConf); + if (list == null) { + return Collections.emptyList(); + } - HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); + DatabaseFilterContext databaseFilterContext = new DatabaseFilterContext(list); + HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = databaseFilterContext.getAuthzContext(); + List filteredDatabases = filterDatabaseObjects(hiveMetaStoreAuthzInfo); + if (CollectionUtils.isEmpty(filteredDatabases)) { + filteredDatabases = 
Collections.emptyList(); + } - authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE); - authzContextBuilder.setSessionString("HiveMetaStore"); + if (LOG.isDebugEnabled()) { + LOG.debug("HiveMetaStoreAuthorizer.filterDatabases() :" + filteredDatabases); + } + return filteredDatabases ; + } - HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build(); + @Override + public final Database filterDatabase(Database database) throws MetaException, NoSuchObjectException { + if (database != null) { + String dbName = database.getName(); + List databases = filterDatabases(Collections.singletonList(dbName)); + if (databases.isEmpty()) { + throw new NoSuchObjectException(String.format("Database %s does not exist", dbName)); + } + } + return database; + } - HiveAuthorizer hiveAuthorizer = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext); + @Override + public final List filterTableNames(String s, String s1, List list) throws MetaException { + if (LOG.isDebugEnabled()) { + LOG.debug("==> HiveMetaStoreAuthorizer.filterTableNames()"); + } + List filteredTableNames = null; + if (list != null) { + String dbName = getDBName(s1); + TableFilterContext tableFilterContext = new TableFilterContext(dbName, list); + HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); + filteredTableNames = filterTableNames(hiveMetaStoreAuthzInfo, dbName, list); + if (CollectionUtils.isEmpty(filteredTableNames)) { + filteredTableNames = Collections.emptyList(); + } + } - checkPrivileges(authzContext, hiveAuthorizer); - } - } catch (Exception e) { - LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e); - throw new MetaException(e.getMessage()); + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.filterTableNames() : " + filteredTableNames); + } + + return filteredTableNames; + } + + @Override + public final Table filterTable(Table table) 
throws MetaException, NoSuchObjectException { + if (table != null) { + List tables = filterTables(Collections.singletonList(table)); + if (tables.isEmpty()) { + throw new NoSuchObjectException(String.format("Table %s does not exist", table.getTableName())); } } + return table; + } + @Override + public final List
filterTables(List
list) throws MetaException { if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType()); + LOG.debug("==> HiveMetaStoreAuthorizer.filterTables()"); + } + + List
filteredTables = null; + + if (list != null) { + TableFilterContext tableFilterContext = new TableFilterContext(list); + HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo = tableFilterContext.getAuthzContext(); + filteredTables = filterTableObjects(hiveMetaStoreAuthzInfo, list); + if (CollectionUtils.isEmpty(filteredTables)) { + filteredTables = Collections.emptyList(); + } } + + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.filterTables(): " + filteredTables); + } + return filteredTables; + } + + @Override + public final Catalog filterCatalog(Catalog catalog) throws MetaException { + return catalog; + } + + @Override + public final List filterCatalogs(List catalogs) throws MetaException { + return catalogs; + } + + @Override + public final List filterTableMetas(String catName,String dbName,List tableMetas) throws MetaException { + return tableMetas; + } + + @Override + public final List filterPartitions(List list) throws MetaException { + return list; + } + + @Override + public final List filterPartitionSpecs(List list) throws MetaException { + return list; + } + + @Override + public final Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException { + return partition; + } + + @Override + public final List filterPartitionNames(String s, String s1, String s2, List list) throws MetaException { + return list; + } + + private List filterDatabaseObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo) throws MetaException { + List ret = null; + + if (LOG.isDebugEnabled()) { + LOG.debug("==> HiveMetaStoreAuthorizer.filterDatabaseObjects()"); + } + + if (hiveAuthorizer == null) { + throw new MetaException("HiveMetaStoreAuthorizer is null!"); + } + + try { + List hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); + HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); + List filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); + 
if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { + ret = getFilterDatabaseList(filteredHivePrivilegeObjects); + } + } catch (Exception e) { + throw new MetaException("Error in HiveMetaStoreAuthorizer.filterDatabaseObjects(): " + e.getMessage()); + } + if (LOG.isDebugEnabled()) { + LOG.debug("<== HiveMetaStoreAuthorizer.filterDatabaseObjects() :" + ret ); + } + return ret; + } + + private List
filterTableObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, List
tableList) throws MetaException { + List
ret = null; + + if (hiveAuthorizer == null) { + throw new MetaException("HiveMetaStoreAuthorizer is null!"); + } + + try { + List hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); + HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); + List filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); + if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { + ret = getFilteredTableList(filteredHivePrivilegeObjects, tableList); + } + } catch (Exception e) { + throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); + } + return ret; + } + + private List getFilterDatabaseList(List hivePrivilegeObjects) { + List ret = new ArrayList<>(); + for(HivePrivilegeObject hivePrivilegeObject:hivePrivilegeObjects) { + String dbName = hivePrivilegeObject.getDbname(); + ret.add(dbName); + } + return ret; + } + + private List
getFilteredTableList(List hivePrivilegeObjects, List
tableList) { + List
ret = new ArrayList<>(); + for(HivePrivilegeObject hivePrivilegeObject:hivePrivilegeObjects) { + String dbName = hivePrivilegeObject.getDbname(); + String tblName = hivePrivilegeObject.getObjectName(); + Table table = getFilteredTable(dbName,tblName,tableList); + if (table != null) { + ret.add(table); + } + } + return ret; + } + + private Table getFilteredTable(String dbName, String tblName, List
tableList) { + Table ret = null; + for (Table table: tableList) { + String databaseName = table.getDbName(); + String tableName = table.getTableName(); + if (dbName.equals(databaseName) && tblName.equals(tableName)) { + ret = table; + break; + } + } + return ret; + } + + private List filterTableNames(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo, String dbName, List tableNames) throws MetaException { + List ret = null; + + if (hiveAuthorizer == null) { + throw new MetaException("HiveMetaStoreAuthorizer is null!"); + } + + try { + List hivePrivilegeObjects = hiveMetaStoreAuthzInfo.getInputHObjs(); + HiveAuthzContext hiveAuthzContext = hiveMetaStoreAuthzInfo.getHiveAuthzContext(); + List filteredHivePrivilegeObjects = hiveAuthorizer.filterListCmdObjects(hivePrivilegeObjects, hiveAuthzContext); + if (CollectionUtils.isNotEmpty(filteredHivePrivilegeObjects)) { + ret = getFilteredTableNames(filteredHivePrivilegeObjects, dbName, tableNames); + } + } catch (Exception e) { + throw new MetaException("Error in HiveMetaStoreAuthorizer.filterTables()" + e.getMessage()); + } + return ret; + } + + private List getFilteredTableNames(List hivePrivilegeObjects, String databaseName, List tableNames) { + List ret = new ArrayList<>(); + for(HivePrivilegeObject hivePrivilegeObject:hivePrivilegeObjects) { + String dbName = hivePrivilegeObject.getDbname(); + String tblName = hivePrivilegeObject.getObjectName(); + String table = getFilteredTableNames(dbName, tblName, databaseName, tableNames); + if (table != null) { + ret.add(table); + } + } + return ret; + } + + private String getFilteredTableNames(String dbName, String tblName, String databaseName, List tableNames) { + String ret = null; + for (String tableName: tableNames) { + if (dbName.equals(databaseName) && tblName.equals(tableName)) { + ret = tableName; + break; + } + } + return ret; + } + + private String getDBName(String str) { + return (str != null) ? 
str.substring(str.indexOf("#")+1) : null; } HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws MetaException { @@ -168,6 +420,12 @@ HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws case DROP_PARTITION: authzEvent = new DropPartitionEvent(preEventContext); break; + case READ_TABLE: + authzEvent = new ReadTableEvent(preEventContext); + break; + case READ_DATABASE: + authzEvent = new ReadDatabaseEvent(preEventContext); + break; case AUTHORIZATION_API_CALL: case READ_ISCHEMA: case CREATE_ISCHEMA: @@ -198,6 +456,29 @@ HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws return ret; } + HiveAuthorizer createHiveMetaStoreAuthorizer() throws Exception { + HiveAuthorizer ret = null; + HiveConf hiveConf = new HiveConf(super.getConf(), HiveConf.class); + HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + + if (authorizerFactory != null) { + HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get(); + + authenticator.setConf(hiveConf); + + HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder(); + + authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE); + authzContextBuilder.setSessionString("HiveMetaStore"); + + HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build(); + + ret = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext); + } + + return ret; + } + boolean isSuperUser(String userName) { Configuration conf = getConf(); String ipAddress = HiveMetaStore.HMSHandler.getIPAddress(); @@ -252,28 +533,23 @@ private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuth } } - private boolean skipAuthorization(HiveMetaStoreAuthzInfo authzContext) { + private boolean skipAuthorization() { if (LOG.isDebugEnabled()) { - 
LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization(): authzContext=" + authzContext); + LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization()"); } boolean ret = false; - if (authzContext == null) { - ret = true; - } else { - - UserGroupInformation ugi = authzContext.getUGI(); + UserGroupInformation ugi = getUGI(); - if (ugi == null) { - ret = true; - } else { - ret = isSuperUser(ugi.getShortUserName()); - } + if (ugi == null) { + ret = true; + } else { + ret = isSuperUser(ugi.getShortUserName()); } if (LOG.isDebugEnabled()) { - LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): authzContext=" + authzContext + "; ret=" + ret); + LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): " + ret); } return ret; @@ -312,5 +588,13 @@ private String getCurrentUser() { private String getCurrentUser(HiveMetaStoreAuthorizableEvent authorizableEvent) { return authorizableEvent.getAuthzContext().getUGI().getShortUserName(); } + + private UserGroupInformation getUGI() { + try { + return UserGroupInformation.getCurrentUser(); + } catch (IOException excp) { + } + return null; + } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java index a372c78..649d34e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; +import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.events.PreEventContext; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext; @@ -80,7 +81,8 @@ public UserGroupInformation getUGI() { public String 
toString() { StringBuilder sb = new StringBuilder(); sb.append("HiveMetaStoreAuthzInfo= ").append("{"); - sb.append("eventType=").append(preEventContext.getEventType().name()); + String eventType = (preEventContext != null)? preEventContext.getEventType().name(): StringUtils.EMPTY; + sb.append("eventType=").append(eventType); sb.append(", operationType=").append(operationType.name()); sb.append(", commandString=" ).append(commandString); sb.append(", inputHObjs=").append(inputHObjs); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java new file mode 100644 index 0000000..01fdbcf --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadDatabaseEvent.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events; + +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.events.PreEventContext; +import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class ReadDatabaseEvent extends HiveMetaStoreAuthorizableEvent { + private static final Log LOG = LogFactory.getLog(ReadDatabaseEvent.class); + + private String COMMAND_STR = "use/show databases or tables"; + + public ReadDatabaseEvent(PreEventContext preEventContext) { + super(preEventContext); + } + + @Override + public HiveMetaStoreAuthzInfo getAuthzContext() { + HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(), COMMAND_STR); + + return ret; + } + + private List getInputHObjs() { + if (LOG.isDebugEnabled()) { + LOG.debug("==> ReadDatabaseEvent.getInputHObjs()"); + } + + List ret = new ArrayList<>(); + PreReadDatabaseEvent preReadDatabaseEvent = (PreReadDatabaseEvent) preEventContext; + Database database = preReadDatabaseEvent.getDatabase(); + if (database != null) { + ret.add(getHivePrivilegeObject(database)); + + COMMAND_STR = buildCommandString(COMMAND_STR, database); + + if (LOG.isDebugEnabled()) { + LOG.debug("<== ReadDatabaseEvent.getInputHObjs(): ret=" + ret); + } + } + + return ret; + } + + private List 
getOutputHObjs() { return Collections.emptyList(); } + + private String buildCommandString(String cmdStr, Database db) { + String ret = cmdStr; + + if (db != null) { + String dbName = db.getName(); + ret = ret + (StringUtils.isNotEmpty(dbName) ? " " + dbName : ""); + } + + return ret; + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadTableEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadTableEvent.java new file mode 100644 index 0000000..d239daf --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/ReadTableEvent.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory;; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.metastore.events.PreEventContext; +import org.apache.hadoop.hive.metastore.events.PreReadTableEvent; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class ReadTableEvent extends HiveMetaStoreAuthorizableEvent { + private static final Log LOG = LogFactory.getLog(ReadTableEvent.class); + private static final String COMMAND_STR = "select"; + + public ReadTableEvent(PreEventContext preEventContext) { + super(preEventContext); + } + + @Override + public HiveMetaStoreAuthzInfo getAuthzContext() { + HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(), COMMAND_STR); + return ret; + } + + private List getInputHObjs() { + if (LOG.isDebugEnabled()) { + LOG.debug("==> ReadTableEvent.getInputHObjs()"); + } + + List ret = new ArrayList<>(); + PreReadTableEvent preReadTableEvent = (PreReadTableEvent) preEventContext; + String dbName = preReadTableEvent.getTable().getDbName(); + Table table = preReadTableEvent.getTable(); + + ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null, null, null, + HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, null, table.getOwner(), table.getOwnerType())); + 
ret.add(getHivePrivilegeObject(table)); + + if (LOG.isDebugEnabled()) { + LOG.debug("<== ReadTableEvent.getInputHObjs()" + ret); + } + return ret; + } + + private List getOutputHObjs() { return Collections.emptyList(); } + +} \ No newline at end of file diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/DatabaseFilterContext.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/DatabaseFilterContext.java new file mode 100644 index 0000000..0113d09 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/DatabaseFilterContext.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class DatabaseFilterContext extends HiveMetaStoreAuthorizableEvent { + + private static final Log LOG = LogFactory.getLog(DatabaseFilterContext.class); + + List databases = null; + + public DatabaseFilterContext(List databases) { + super(null); + this.databases = databases; + getAuthzContext(); + } + + @Override + public HiveMetaStoreAuthzInfo getAuthzContext() { + HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(), null); + return ret; + } + + private List getInputHObjs() { + if (LOG.isDebugEnabled()) { + LOG.debug("==> DatabaseFilterContext.getOutputHObjs()"); + } + + List ret = new ArrayList<>(); + for(String database: databases) { + HivePrivilegeObjectType type = HivePrivilegeObjectType.DATABASE; + HivePrivObjectActionType objectActionType = HivePrivObjectActionType.OTHER; + HivePrivilegeObject hivePrivilegeObject = new HivePrivilegeObject(type, database, null, null, null, objectActionType, null, null); + ret.add(hivePrivilegeObject); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("<== DatabaseFilterContext.getOutputHObjs(): 
ret=" + ret); + } + + return ret; + } + + private List getOutputHObjs() { return Collections.emptyList(); } + + public List getDatabases() { + return databases; + } +} \ No newline at end of file diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/TableFilterContext.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/TableFilterContext.java new file mode 100644 index 0000000..47b91f7 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/filtercontext/TableFilterContext.java @@ -0,0 +1,98 @@ + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent; +import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class TableFilterContext extends HiveMetaStoreAuthorizableEvent { + private static final Log LOG = LogFactory.getLog(DatabaseFilterContext.class); + + List
tables = null; + List tableNames = null; + String dbName = null; + + public TableFilterContext(List
tables) { + super(null); + this.tables = tables; + getAuthzContext(); + } + + public TableFilterContext(String dbName, List tableNames) { + super(null); + this.dbName = dbName; + this.tableNames = tableNames; + } + + @Override + public HiveMetaStoreAuthzInfo getAuthzContext() { + HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY, getInputHObjs(), getOutputHObjs(), null); + return ret; + } + + private List getInputHObjs() { + if (LOG.isDebugEnabled()) { + LOG.debug("==> TableFilterContext.getOutputHObjs()"); + } + + List ret = new ArrayList<>(); + + if (tables != null) { + for (Table table : tables) { + HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW; + HivePrivObjectActionType objectActionType = HivePrivilegeObject.HivePrivObjectActionType.OTHER; + HivePrivilegeObject hivePrivilegeObject = new HivePrivilegeObject(type, table.getDbName(), table.getTableName(), null, null, objectActionType, null, null); + ret.add(hivePrivilegeObject); + } + } else { + for (String tableName : tableNames) { + HivePrivilegeObjectType type = HivePrivilegeObjectType.TABLE_OR_VIEW; + HivePrivObjectActionType objectActionType = HivePrivilegeObject.HivePrivObjectActionType.OTHER; + HivePrivilegeObject hivePrivilegeObject = new HivePrivilegeObject(type, dbName, tableName, null, null, objectActionType, null, null); + ret.add(hivePrivilegeObject); + } + } + + if (LOG.isDebugEnabled()) { + LOG.debug("<== TableFilterContext.getOutputHObjs(): ret=" + ret); + } + + return ret; + } + + private List getOutputHObjs() { return Collections.emptyList(); } + + public List
getTables() { + return tables; + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java index b9c0dcc..f6af926 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore; +import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.metastore.ColumnType; @@ -283,4 +284,50 @@ public void testM_DropCatalog_SuperUser() throws Exception { // no Exceptions for superuser as hive is allowed CREATE CATALOG operation } } + + @Test + public void testN__ShowDatabase_authorizedUser() throws Exception { + UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); + try { + hmsHandler.get_all_databases(); + } catch (Exception e) { + // no Exceptions for show database as authorized user. + } + } + + @Test + public void testO__ShowDatabase_unAuthorizedUser() throws Exception { + UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); + try { + hmsHandler.get_all_databases(); + } catch (Exception e) { + String err = e.getMessage(); + if (StringUtils.isNotEmpty(err)) { + assert(true); + } + } + } + + @Test + public void testP__ShowTables_authorizedUser() throws Exception { + UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser)); + try { + hmsHandler.get_all_tables("default"); + } catch (Exception e) { + // no Exceptions for show tables as authorized user. 
+ } + } + + @Test + public void testQ__ShowTables_unAuthorizedUser() throws Exception { + UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser)); + try { + hmsHandler.get_all_tables("default"); + } catch (Exception e) { + String err = e.getMessage(); + if (StringUtils.isNotEmpty(err)) { + assert(true); + } + } + } } -- 2.10.1 (Apple Git-78)