diff --git a/metastore/if/hive_metastore.thrift b/metastore/if/hive_metastore.thrift
index c400cdf..b3f01d6 100755
--- a/metastore/if/hive_metastore.thrift
+++ b/metastore/if/hive_metastore.thrift
@@ -138,6 +138,11 @@ struct Role {
   1: string roleName,
   2: i32 createTime,
   3: string ownerName,
+
+  // Following fields are populated by list_roles
+  // They are ignored during other commands such as role creation
+  // See RolePrincipalGrant which gives a 'normalized' representation
+  // of this information
   4: optional string principalName,
   5: optional string principalType,
   6: optional bool grantOption,
@@ -145,6 +150,25 @@ struct Role {
   8: optional string grantor
 }
 
+// Representation of a grant for a principal to a role
+struct RolePrincipalGrant {
+  1: string roleName,
+  2: string principalName,
+  3: PrincipalType principalType,
+  4: bool grantOption,
+  5: i32 grantTime,
+  6: string grantorName,
+  7: PrincipalType grantorPrincipalType
+}
+
+struct GetPrincipalsInRoleRequest {
+  1: string roleName;
+}
+
+struct GetPrincipalsInRoleResponse {
+  1: list<RolePrincipalGrant> principalGrants;
+}
+
 // namespace for tables
 struct Database {
   1: string name,
@@ -916,6 +940,11 @@ service ThriftHiveMetastore extends fb303.FacebookService
     throws(1:MetaException o1)
   list<Role> list_roles(1:string principal_name, 2:PrincipalType principal_type)
     throws(1:MetaException o1)
+
+  // get all role-grants for users/roles that have been granted the given role
+  // Note that in the returned list of RolePrincipalGrants, the roleName is
+  // redundant as it would match the role_name argument of this function
+  GetPrincipalsInRoleResponse get_principals_in_role(1: GetPrincipalsInRoleRequest request) throws(1:MetaException o1)
+
   PrincipalPrivilegeSet get_privilege_set(1:HiveObjectRef hiveObject, 2:string user_name,
     3: list<string> group_names) throws(1:MetaException o1)
   list<HiveObjectPrivilege> list_privileges(1:string principal_name, 2:PrincipalType principal_type,
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 0cabf35..fbfb062 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import com.facebook.fb303.FacebookBase;
-import com.facebook.fb303.fb_status;
 import static org.apache.commons.lang.StringUtils.join;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_COMMENT;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
@@ -44,7 +42,6 @@
 import java.util.Properties;
 import java.util.Set;
 import java.util.Timer;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.regex.Pattern;
 
 import org.apache.commons.cli.OptionBuilder;
@@ -53,7 +50,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -82,6 +78,8 @@
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
 import org.apache.hadoop.hive.metastore.api.HeartbeatRequest;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
@@ -112,6 +110,7 @@
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.RequestPartsSpec;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
 import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
@@ -120,6 +119,7 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableStatsRequest;
 import org.apache.hadoop.hive.metastore.api.TableStatsResult;
+import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
 import org.apache.hadoop.hive.metastore.api.TxnOpenException;
 import org.apache.hadoop.hive.metastore.api.Type;
@@ -128,8 +128,6 @@
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 import org.apache.hadoop.hive.metastore.api.UnlockRequest;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.txn.TxnHandler;
-import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -158,6 +156,7 @@
 import org.apache.hadoop.hive.metastore.model.MRoleMap;
 import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege;
 import org.apache.hadoop.hive.metastore.model.MTablePrivilege;
+import org.apache.hadoop.hive.metastore.txn.TxnHandler;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -172,8 +171,14 @@
 import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.server.TServer;
 import org.apache.thrift.server.TThreadPoolServer;
-import org.apache.thrift.transport.*;
+import org.apache.thrift.transport.TFramedTransport;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TServerTransport;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportFactory;
 
+import com.facebook.fb303.FacebookBase;
+import com.facebook.fb303.fb_status;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
@@ -4027,6 +4032,8 @@ private boolean isNewRoleAParent(String newRole, String curRole) throws MetaExce
       }
     }
 
+
+
     @Override
     public boolean create_role(final Role role)
         throws MetaException, TException {
@@ -4875,8 +4882,47 @@ public ShowCompactResponse show_compact(ShowCompactRequest rqst) throws TExcepti
         throw new TException(e);
       }
     }
+
+    @Override
+    public GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest request)
+        throws MetaException, TException {
+
+      incrementCounter("get_principals_in_role");
+      String role_name = request.getRoleName();
+      List<RolePrincipalGrant> rolePrinGrantList = new ArrayList<RolePrincipalGrant>();
+      try {
+        List<MRoleMap> roleMaps = getMS().listRoleMembers(role_name);
+        if (roleMaps != null) {
+          //convert each MRoleMap object into a thrift RolePrincipalGrant object
+          for (MRoleMap roleMap : roleMaps) {
+            String mapRoleName = roleMap.getRole().getRoleName();
+            if (!role_name.equals(mapRoleName)) {
+              // should not happen
+              throw new AssertionError("Role name " + mapRoleName + " does not match role name arg "
+                  + role_name);
+            }
+            RolePrincipalGrant rolePrinGrant = new RolePrincipalGrant(
+                role_name,
+                roleMap.getPrincipalName(),
+                PrincipalType.valueOf(roleMap.getPrincipalType()),
+                roleMap.getGrantOption(),
+                roleMap.getAddTime(),
+                roleMap.getGrantor(),
+                PrincipalType.valueOf(roleMap.getGrantorType())
+                );
+            rolePrinGrantList.add(rolePrinGrant);
+          }
+        }
+        return new GetPrincipalsInRoleResponse(rolePrinGrantList);
+      } catch (MetaException e) {
+        throw e;
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
   }
 
+
   public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws MetaException {
     return RetryingHMSHandler.getProxy(hiveConf, name);
   }
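For orientation, here is a minimal sketch of calling the new API from the client side. Everything besides the API added in this patch is illustrative: the HiveConf setup, role name, and printing are assumptions, and it presumes the thrift-generated accessors (getPrincipalGrants(), isGrantOption()) plus a reachable metastore.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
    import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
    import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;

    public class PrincipalsInRoleExample {
      public static void main(String[] args) throws Exception {
        // connects to the metastore configured in hive-site.xml
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        try {
          GetPrincipalsInRoleResponse resp =
              client.get_principals_in_role(new GetPrincipalsInRoleRequest("some_role"));
          for (RolePrincipalGrant grant : resp.getPrincipalGrants()) {
            // roleName inside each grant matches the requested role,
            // as noted in the thrift comment above
            System.out.println(grant.getPrincipalName() + " (" + grant.getPrincipalType()
                + "), grantOption=" + grant.isGrantOption());
          }
        } finally {
          client.close();
        }
      }
    }

The client-side plumbing this sketch relies on is the thin delegation added to HiveMetaStoreClient in the next file.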
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 8d5b225..529e538 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -27,6 +27,8 @@
 import java.lang.reflect.Method;
 import java.lang.reflect.Proxy;
 import java.net.InetAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -60,12 +62,13 @@
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DropPartitionsExpr;
 import org.apache.hadoop.hive.metastore.api.DropPartitionsRequest;
-import org.apache.hadoop.hive.metastore.api.DropPartitionsResult;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
 import org.apache.hadoop.hive.metastore.api.HeartbeatRequest;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
@@ -119,12 +122,6 @@
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.isIndexTable;
-
 /**
  * Hive Metastore Client.
  */
@@ -224,6 +221,7 @@ private void promoteRandomMetaStoreURI() {
     metastoreUris[index] = tmp;
   }
 
+  @Override
   public void reconnect() throws MetaException {
     if (localMetaStore) {
       // For direct DB connections we don't yet support reestablishing connections.
@@ -250,6 +248,7 @@ public void reconnect() throws MetaException {
    * java.lang.String, java.lang.String,
    * org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Override
   public void alter_table(String dbname, String tbl_name, Table new_tbl)
       throws InvalidOperationException, MetaException, TException {
     alter_table(dbname, tbl_name, new_tbl, null);
@@ -271,6 +270,7 @@ public void alter_table(String dbname, String tbl_name, Table new_tbl,
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#rename_partition(
    * java.lang.String, java.lang.String, java.util.List, org.apache.hadoop.hive.metastore.api.Partition)
    */
+  @Override
   public void renamePartition(final String dbname, final String name,
       final List<String> part_vals, final Partition newPart)
       throws InvalidOperationException, MetaException, TException {
     client.rename_partition(dbname, name, part_vals, newPart);
@@ -381,6 +381,7 @@ public String getTokenStrForm() throws IOException {
     return tokenStrForm;
   }
 
+  @Override
   public void close() {
     isConnected = false;
     try {
@@ -406,6 +407,7 @@ public void close() {
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#add_partition(org.apache.hadoop.hive.metastore.api.Partition)
    */
+  @Override
   public Partition add_partition(Partition new_part)
       throws InvalidObjectException, AlreadyExistsException, MetaException,
       TException {
@@ -426,6 +428,7 @@ public Partition add_partition(Partition new_part, EnvironmentContext envContext
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#add_partitions(List)
    */
+  @Override
   public int add_partitions(List<Partition> new_parts)
       throws InvalidObjectException, AlreadyExistsException, MetaException,
       TException {
@@ -459,6 +462,7 @@ public int add_partitions(List<Partition> new_parts)
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#append_partition(java.lang.String,
    * java.lang.String, java.util.List)
    */
+  @Override
   public Partition appendPartition(String db_name, String table_name,
       List<String> part_vals) throws InvalidObjectException,
       AlreadyExistsException, MetaException, TException {
@@ -472,6 +476,7 @@ public Partition appendPartition(String db_name, String table_name, List<String
         part_vals, envContext));
   }
 
+  @Override
   public Partition appendPartition(String dbName, String tableName, String partName)
       throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
     return appendPartition(dbName, tableName, partName, null);
@@ -500,6 +505,7 @@ public Partition exchange_partition(Map<String, String> partitionSpecs,
         destDb, destinationTableName);
   }
 
+  @Override
   public void validatePartitionNameCharacters(List<String> partVals)
       throws TException, MetaException {
     client.partition_name_has_valid_characters(partVals, true);
@@ -514,6 +520,7 @@ public void validatePartitionNameCharacters(List<String> partVals)
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_database(Database)
    */
+  @Override
   public void createDatabase(Database db)
       throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
     client.create_database(db);
@@ -526,6 +533,7 @@ public void createDatabase(Database db)
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#create_table(org.apache.hadoop.hive.metastore.api.Table)
    */
+  @Override
   public void createTable(Table tbl) throws AlreadyExistsException,
       InvalidObjectException, MetaException, NoSuchObjectException, TException {
     createTable(tbl, null);
@@ -573,16 +581,19 @@ public boolean createType(Type type) throws AlreadyExistsException,
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_database(java.lang.String, boolean, boolean)
    */
+  @Override
   public void dropDatabase(String name)
       throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
     dropDatabase(name, true, false, false);
   }
 
+  @Override
   public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb)
       throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
     dropDatabase(name, deleteData, ignoreUnknownDb, false);
   }
 
+  @Override
   public void dropDatabase(String name, boolean deleteData, boolean ignoreUnknownDb, boolean cascade)
       throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
     try {
@@ -630,6 +641,7 @@ public boolean dropPartition(String db_name, String tbl_name, List<String> part_
     return dropPartition(db_name, tbl_name, part_vals, true, env_context);
   }
 
+  @Override
   public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData)
       throws NoSuchObjectException, MetaException, TException {
     return dropPartition(dbName, tableName, partName, deleteData, null);
@@ -654,6 +666,7 @@ public boolean dropPartition(String dbName, String tableName, String partName, b
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_partition(java.lang.String,
    * java.lang.String, java.util.List, boolean)
    */
+  @Override
   public boolean dropPartition(String db_name, String tbl_name,
       List<String> part_vals, boolean deleteData)
       throws NoSuchObjectException, MetaException, TException {
@@ -697,12 +710,14 @@ public boolean dropPartition(String db_name, String tbl_name, List<String> part_
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String,
    * java.lang.String, boolean)
    */
+  @Override
   public void dropTable(String dbname, String name)
       throws NoSuchObjectException, MetaException, TException {
     dropTable(dbname, name, true, true, null);
   }
 
   /** {@inheritDoc} */
+  @Override
   @Deprecated
   public void dropTable(String tableName, boolean deleteData)
       throws MetaException, UnknownTableException, TException, NoSuchObjectException {
@@ -720,6 +735,7 @@ public void dropTable(String tableName, boolean deleteData)
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#drop_table(java.lang.String,
    * java.lang.String, boolean)
    */
+  @Override
   public void dropTable(String dbname, String name, boolean deleteData,
       boolean ignoreUnknownTab) throws MetaException, TException,
       NoSuchObjectException, UnsupportedOperationException {
@@ -795,6 +811,7 @@ public boolean dropType(String type) throws NoSuchObjectException, MetaException
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<String> getDatabases(String databasePattern)
     throws MetaException {
     try {
@@ -806,6 +823,7 @@ public boolean dropType(String type) throws NoSuchObjectException, MetaException
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<String> getAllDatabases() throws MetaException {
     try {
       return client.get_all_databases();
@@ -824,6 +842,7 @@ public boolean dropType(String type) throws NoSuchObjectException, MetaException
    * @throws MetaException
    * @throws TException
    */
+  @Override
   public List<Partition> listPartitions(String db_name, String tbl_name, short max_parts)
       throws NoSuchObjectException, MetaException, TException {
     return deepCopyPartitions(
@@ -869,6 +888,7 @@ public boolean dropType(String type) throws NoSuchObjectException, MetaException
    * @throws NoSuchObjectException
    * @throws TException
    */
+  @Override
   public List<Partition> listPartitionsByFilter(String db_name, String tbl_name,
       String filter, short max_parts) throws MetaException,
       NoSuchObjectException, TException {
@@ -915,6 +935,7 @@ public boolean listPartitionsByExpr(String db_name, String tbl_name, byte[] expr
    * @throws TException
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_database(java.lang.String)
    */
+  @Override
   public Database getDatabase(String name) throws NoSuchObjectException,
       MetaException, TException {
     return deepCopy(client.get_database(name));
@@ -930,11 +951,13 @@ public Database getDatabase(String name) throws NoSuchObjectException,
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_partition(java.lang.String,
    * java.lang.String, java.util.List)
    */
+  @Override
   public Partition getPartition(String db_name, String tbl_name,
       List<String> part_vals) throws NoSuchObjectException, MetaException, TException {
     return deepCopy(client.get_partition(db_name, tbl_name, part_vals));
   }
 
+  @Override
   public List<Partition> getPartitionsByNames(String db_name, String tbl_name,
       List<String> part_names) throws NoSuchObjectException, MetaException, TException {
     return deepCopyPartitions(client.get_partitions_by_names(db_name, tbl_name, part_names));
@@ -960,12 +983,14 @@ public Partition getPartitionWithAuthInfo(String db_name, String tbl_name,
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_table(java.lang.String,
    * java.lang.String)
    */
+  @Override
   public Table getTable(String dbname, String name) throws MetaException,
       TException, NoSuchObjectException {
     return deepCopy(client.get_table(dbname, name));
   }
 
   /** {@inheritDoc} */
+  @Override
   @Deprecated
   public Table getTable(String tableName) throws MetaException, TException,
       NoSuchObjectException {
@@ -973,12 +998,14 @@ public Table getTable(String tableName) throws MetaException, TException,
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<Table> getTableObjectsByName(String dbName, List<String> tableNames)
       throws MetaException, InvalidOperationException, UnknownDBException, TException {
     return deepCopyTables(client.get_table_objects_by_name(dbName, tableNames));
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<String> listTableNamesByFilter(String dbName, String filter, short maxTables)
       throws MetaException, TException, InvalidOperationException, UnknownDBException {
     return client.get_table_names_by_filter(dbName, filter, maxTables);
@@ -997,6 +1024,7 @@ public Type getType(String name) throws NoSuchObjectException, MetaException, TE
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<String> getTables(String dbname, String tablePattern) throws MetaException {
     try {
       return client.get_tables(dbname, tablePattern);
@@ -1007,6 +1035,7 @@ public Type getType(String name) throws NoSuchObjectException, MetaException, TE
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<String> getAllTables(String dbname) throws MetaException {
     try {
       return client.get_all_tables(dbname);
@@ -1016,6 +1045,7 @@ public Type getType(String name) throws NoSuchObjectException, MetaException, TE
     return null;
   }
 
+  @Override
   public boolean tableExists(String databaseName, String tableName) throws MetaException,
       TException, UnknownDBException {
     try {
@@ -1027,12 +1057,14 @@ public boolean tableExists(String databaseName, String tableName) throws MetaExc
   }
 
   /** {@inheritDoc} */
+  @Override
   @Deprecated
   public boolean tableExists(String tableName) throws MetaException,
       TException, UnknownDBException {
     return tableExists(DEFAULT_DATABASE_NAME, tableName);
   }
 
+  @Override
   public List<String> listPartitionNames(String dbName, String tblName, short max)
       throws MetaException, TException {
     return client.get_partition_names(dbName, tblName, max);
@@ -1045,16 +1077,19 @@ public boolean tableExists(String tableName) throws MetaException,
     return client.get_partition_names_ps(db_name, tbl_name, part_vals, max_parts);
   }
 
+  @Override
   public void alter_partition(String dbName, String tblName, Partition newPart)
       throws InvalidOperationException, MetaException, TException {
     client.alter_partition(dbName, tblName, newPart);
   }
 
+  @Override
   public void alter_partitions(String dbName, String tblName, List<Partition> newParts)
       throws InvalidOperationException, MetaException, TException {
     client.alter_partitions(dbName, tblName, newParts);
   }
 
+  @Override
   public void alterDatabase(String dbName, Database db)
       throws MetaException, NoSuchObjectException, TException {
     client.alter_database(dbName, db);
@@ -1069,6 +1104,7 @@ public void alterDatabase(String dbName, Database db)
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_fields(java.lang.String,
    * java.lang.String)
    */
+  @Override
   public List<FieldSchema> getFields(String db, String tableName)
       throws MetaException, TException, UnknownTableException,
       UnknownDBException {
@@ -1085,6 +1121,7 @@ public List<FieldSchema> getFields(String db, String tableName)
    * @throws TException
    * @throws AlreadyExistsException
    */
+  @Override
   public void createIndex(Index index, Table indexTable) throws AlreadyExistsException,
       InvalidObjectException, MetaException, NoSuchObjectException, TException {
     client.add_index(index, indexTable);
   }
@@ -1100,6 +1137,7 @@ public void createIndex(Index index, Table indexTable) throws AlreadyExistsExcep
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#alter_index(java.lang.String,
    * java.lang.String, java.lang.String, org.apache.hadoop.hive.metastore.api.Index)
    */
+  @Override
   public void alter_index(String dbname, String base_tbl_name, String idx_name, Index new_idx)
       throws InvalidOperationException, MetaException, TException {
     client.alter_index(dbname, base_tbl_name, idx_name, new_idx);
@@ -1115,6 +1153,7 @@ public void alter_index(String dbname, String base_tbl_name, String idx_name, In
    * @throws NoSuchObjectException
    * @throws TException
    */
+  @Override
   public Index getIndex(String dbName, String tblName, String indexName)
       throws MetaException, UnknownTableException, NoSuchObjectException,
       TException {
@@ -1131,6 +1170,7 @@ public Index getIndex(String dbName, String tblName, String indexName)
    * @throws MetaException
    * @throws TException
    */
+  @Override
   public List<String> listIndexNames(String dbName, String tblName, short max)
       throws MetaException, TException {
     return client.get_index_names(dbName, tblName, max);
@@ -1146,12 +1186,14 @@ public Index getIndex(String dbName, String tblName, String indexName)
    * @throws MetaException
    * @throws TException
    */
+  @Override
   public List<Index> listIndexes(String dbName, String tblName, short max)
       throws NoSuchObjectException, MetaException, TException {
     return client.get_indexes(dbName, tblName, max);
   }
 
   /** {@inheritDoc} */
+  @Override
   public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
     throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
     InvalidInputException{
@@ -1159,6 +1201,7 @@ public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
   }
 
   /** {@inheritDoc} */
+  @Override
   public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj)
     throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
     InvalidInputException{
@@ -1166,6 +1209,7 @@ public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj)
   }
 
   /** {@inheritDoc} */
+  @Override
   public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tableName,
       List<String> colNames) throws NoSuchObjectException, MetaException, TException,
       InvalidInputException, InvalidObjectException {
@@ -1174,6 +1218,7 @@ public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj)
   }
 
   /** {@inheritDoc} */
+  @Override
   public Map<String, List<ColumnStatisticsObj>> getPartitionColumnStatistics(
       String dbName, String tableName, List<String> partNames, List<String> colNames)
       throws NoSuchObjectException, MetaException, TException {
@@ -1182,6 +1227,7 @@ public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj)
   }
 
   /** {@inheritDoc} */
+  @Override
   public boolean deletePartitionColumnStatistics(String dbName, String tableName, String partName,
     String colName) throws NoSuchObjectException, InvalidObjectException, MetaException,
     TException, InvalidInputException
@@ -1190,6 +1236,7 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName,
   }
 
   /** {@inheritDoc} */
+  @Override
   public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
     throws NoSuchObjectException, InvalidObjectException, MetaException, TException,
     InvalidInputException
@@ -1207,17 +1254,20 @@ public boolean deleteTableColumnStatistics(String dbName, String tableName, Stri
    * @see org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.Iface#get_schema(java.lang.String,
    * java.lang.String)
    */
+  @Override
   public List<FieldSchema> getSchema(String db, String tableName)
       throws MetaException, TException, UnknownTableException,
       UnknownDBException {
     return deepCopyFieldSchemas(client.get_schema(db, tableName));
   }
 
+  @Override
   public String getConfigValue(String name, String defaultValue)
       throws TException, ConfigValSecurityException {
     return client.get_config_value(name, defaultValue);
   }
 
+  @Override
   public Partition getPartition(String db, String tableName, String partName)
       throws MetaException, TException, UnknownTableException, NoSuchObjectException {
     return deepCopy(client.get_partition_by_name(db, tableName, partName));
@@ -1402,6 +1452,12 @@ public boolean drop_role(String roleName) throws MetaException, TException {
   }
 
   @Override
+  public GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest req)
+      throws MetaException, TException {
+    return client.get_principals_in_role(req);
+  }
+
+  @Override
   public boolean grant_privileges(PrivilegeBag privileges)
       throws MetaException, TException {
     return client.grant_privileges(privileges);
@@ -1656,6 +1712,7 @@ public static IMetaStoreClient newSynchronizedClient(
       this.client = client;
     }
 
+    @Override
     public Object invoke(Object proxy, Method method, Object [] args) throws Throwable {
       try {
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 612a1a7..0f8dd54 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -18,20 +18,6 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import org.apache.hadoop.hive.metastore.api.CompactionType;
-import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
-import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
-import org.apache.hadoop.hive.metastore.api.LockRequest;
-import org.apache.hadoop.hive.metastore.api.LockResponse;
-import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
-import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
-import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
-import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
-import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
-import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
-import org.apache.hadoop.hive.metastore.api.TxnOpenException;
-import org.apache.thrift.TException;
-
 import java.util.List;
 import java.util.Map;
@@ -39,10 +25,15 @@
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
+import org.apache.hadoop.hive.metastore.api.CompactionType;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
+import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -50,15 +41,24 @@
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
+import org.apache.hadoop.hive.metastore.api.LockRequest;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
+import org.apache.hadoop.hive.metastore.api.TxnOpenException;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 import org.apache.hadoop.hive.metastore.api.UnknownTableException;
@@ -1079,6 +1079,7 @@ public Function getFunction(String dbName, String funcName)
    * implementation needs to override the default implementation. This should produce a string
    * that can be used by {@link #fromString(String)} to populate a validTxnsList.
    */
+  @Override
   public String toString();
 
   /**
@@ -1297,4 +1298,16 @@ public IncompatibleMetastoreException(String message) {
       super(message);
     }
   }
+
+  /**
+   * get all role-grants for users/roles that have been granted the given role
+   * Note that in the returned list of RolePrincipalGrants, the roleName is
+   * redundant as it would match the role_name argument of this function
+   * @param getPrincRoleReq
+   * @return
+   * @throws MetaException
+   * @throws TException
+   */
+  GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleRequest getPrincRoleReq) throws MetaException,
+      TException;
 }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 4c0c244..0c243ae 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -3105,7 +3105,7 @@ public boolean removeRole(String roleName) throws MetaException,
       if (mRol != null) {
         // first remove all the membership, the membership that this role has
         // been granted
-        List<MRoleMap> roleMap = listRoleMembers(mRol);
+        List<MRoleMap> roleMap = listRoleMembers(mRol.getRoleName());
         if (roleMap.size() > 0) {
           pm.deletePersistentAll(roleMap);
         }
@@ -4053,8 +4053,8 @@ public boolean revokePrivileges(PrivilegeBag privileges)
   }
 
   @SuppressWarnings("unchecked")
-  private List<MRoleMap> listRoleMembers(
-      MRole mRol) {
+  @Override
+  public List<MRoleMap> listRoleMembers(String roleName) {
     boolean success = false;
     List<MRoleMap> mRoleMemeberList = null;
     try {
@@ -4065,7 +4065,7 @@ public boolean revokePrivileges(PrivilegeBag privileges)
       query.declareParameters("java.lang.String t1");
       query.setUnique(false);
       mRoleMemeberList = (List<MRoleMap>) query.execute(
-          mRol.getRoleName());
+          roleName);
       LOG.debug("Done executing query for listMSecurityUserRoleMember");
       pm.retrieveAll(mRoleMemeberList);
       success = commitTransaction();
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index 05938ea..e0de0e0 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -24,7 +24,6 @@
 import java.lang.annotation.Target;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
@@ -285,6 +284,15 @@ public abstract boolean revokePrivileges (PrivilegeBag privileges)
   public List<Role> listRoles(String principalName,
       PrincipalType principalType);
 
+
+  /**
+   * Get the role to principal grant mapping for given role
+   * @param roleName
+   * @return
+   */
+  public List<MRoleMap> listRoleMembers(String roleName);
+
+
   public abstract Partition getPartitionWithAuth(String dbName, String tblName,
       List<String> partVals, String user_name, List<String> group_names)
       throws MetaException, NoSuchObjectException, InvalidObjectException;
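The listRoleMembers(String) method promoted to the RawStore interface above is the source of the raw grant rows that HMSHandler converts into thrift RolePrincipalGrant objects. A small hedged sketch of exercising it directly against any RawStore implementation — store initialization and the role name are assumptions, not shown in the patch:

    import java.util.List;

    import org.apache.hadoop.hive.metastore.RawStore;
    import org.apache.hadoop.hive.metastore.model.MRoleMap;

    public class ListRoleMembersSketch {
      // Dumps the raw grant rows for a role; getPrincipalName()/getPrincipalType()
      // are the same MRoleMap accessors the server-side handler uses above.
      public static void dump(RawStore store, String roleName) {
        List<MRoleMap> members = store.listRoleMembers(roleName);
        if (members == null) {
          return;
        }
        for (MRoleMap m : members) {
          // the principal type is persisted as a string and parsed server-side
          // with PrincipalType.valueOf
          System.out.println(m.getPrincipalName() + "\t" + m.getPrincipalType());
        }
      }
    }

The two test doubles in the next files implement the same method: one delegates to a real ObjectStore, the other stubs it out.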
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index 8d8ce97..5c00aa1 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -21,7 +21,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -459,6 +458,11 @@ public Role getRole(String roleName) throws NoSuchObjectException {
   }
 
   @Override
+  public List<MRoleMap> listRoleMembers(String roleName) {
+    return objectStore.listRoleMembers(roleName);
+  }
+
+  @Override
   public Partition getPartitionWithAuth(String dbName, String tblName,
       List<String> partVals, String userName, List<String> groupNames)
       throws MetaException, NoSuchObjectException, InvalidObjectException {
@@ -571,6 +575,7 @@ public boolean deleteTableColumnStatistics(String dbName, String tableName,
     return objectStore.deleteTableColumnStatistics(dbName, tableName, colName);
   }
 
+  @Override
   public boolean deletePartitionColumnStatistics(String dbName, String tableName,
       String partName, List<String> partVals, String colName)
       throws NoSuchObjectException, MetaException, InvalidObjectException,
@@ -586,6 +591,7 @@ public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
     return objectStore.updateTableColumnStatistics(statsObj);
   }
 
+  @Override
   public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj,
       List<String> partVals)
       throws NoSuchObjectException, MetaException, InvalidObjectException,
@@ -593,33 +599,41 @@ public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj,
     return objectStore.updatePartitionColumnStatistics(statsObj, partVals);
   }
 
+  @Override
   public boolean addToken(String tokenIdentifier, String delegationToken) {
     return false;
   }
 
+  @Override
   public boolean removeToken(String tokenIdentifier) {
     return false;
   }
 
+  @Override
   public String getToken(String tokenIdentifier) {
     return "";
   }
 
+  @Override
   public List<String> getAllTokenIdentifiers() {
     return new ArrayList<String>();
   }
 
+  @Override
   public int addMasterKey(String key) throws MetaException {
     return -1;
   }
 
+  @Override
   public void updateMasterKey(Integer seqNo, String key)
       throws NoSuchObjectException, MetaException {}
 
+  @Override
   public boolean removeMasterKey(Integer keySeq) {
     return false;
   }
 
+  @Override
   public String[] getMasterKeys() {
     return new String[0];
   }
@@ -664,6 +678,7 @@ public void dropPartitions(String dbName, String tblName, List<String> partNames
     objectStore.dropPartitions(dbName, tblName, partNames);
   }
 
+  @Override
   public void createFunction(Function func) throws InvalidObjectException,
       MetaException {
     objectStore.createFunction(func);
@@ -694,4 +709,5 @@ public Function getFunction(String dbName, String funcName)
     return objectStore.getFunctions(dbName, pattern);
   }
 
+
 }
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index 397ae06..5025b83 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -20,7 +20,6 @@
 
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import junit.framework.Assert;
@@ -488,6 +487,11 @@ public Role getRole(String roleName) throws NoSuchObjectException {
   }
 
   @Override
+  public List<MRoleMap> listRoleMembers(String roleName) {
+    return null;
+  }
+
+  @Override
   public Partition getPartitionWithAuth(String dbName, String tblName,
       List<String> partVals, String user_name, List<String> group_names)
       throws MetaException, NoSuchObjectException, InvalidObjectException {
@@ -637,6 +641,7 @@ public boolean deleteTableColumnStatistics(String dbName, String tableName,
   }
 
 
+  @Override
   public boolean deletePartitionColumnStatistics(String dbName, String tableName,
       String partName, List<String> partVals, String colName)
       throws NoSuchObjectException, MetaException, InvalidObjectException,
@@ -651,6 +656,7 @@ public boolean updateTableColumnStatistics(ColumnStatistics statsObj)
     return false;
   }
 
+  @Override
   public boolean updatePartitionColumnStatistics(ColumnStatistics statsObj,List<String> partVals)
       throws NoSuchObjectException, MetaException, InvalidObjectException {
     return false;
@@ -692,6 +698,7 @@ public boolean addPartitions(String dbName, String tblName, List<Partition> part
   public void dropPartitions(String dbName, String tblName, List<String> partNames) {
   }
 
+  @Override
   public void createFunction(Function func) throws InvalidObjectException,
       MetaException {
   }
@@ -718,6 +725,8 @@ public Function getFunction(String dbName, String funcName)
       throws MetaException {
     return null;
   }
 
+
+
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index ab6f95d..2295b0d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -47,8 +47,11 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -56,7 +59,26 @@
 import org.apache.hadoop.hive.metastore.ProtectMode;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;
+import org.apache.hadoop.hive.metastore.api.SkewedInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -68,16 +90,31 @@
 import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork;
 import org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateTask;
 import org.apache.hadoop.hive.ql.io.rcfile.truncate.ColumnTruncateWork;
-import org.apache.hadoop.hive.ql.lockmgr.*;
+import org.apache.hadoop.hive.ql.lockmgr.DbLockManager;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
+import org.apache.hadoop.hive.ql.metadata.CheckResult;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
 import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.plan.*;
+import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
+import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableExchangePartition;
 import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc;
@@ -130,6 +167,7 @@
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -897,7 +935,11 @@ private int roleDDL(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
       }
       outStream.close();
       outStream = null;
-    } else {
+    } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_PRINCIPALS)) {
+      throw new HiveException("Show role principals is not currently supported in "
+          + "authorization mode V1");
+    }
+    else {
       throw new HiveException("Unkown role operation "
           + operation.getOperationName());
     }
@@ -948,6 +990,11 @@ private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException
     case SET_ROLE:
       authorizer.setCurrentRole(roleDDLDesc.getName());
       break;
+    case SHOW_ROLE_PRINCIPALS:
+      testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
+      List<HiveRoleGrant> roleGrants = authorizer.getPrincipalsInRoleInfo(roleDDLDesc.getName());
+      writeToFile(writeHiveRoleGrantInfo(roleGrants, testMode), roleDDLDesc.getResFile());
+      break;
     default:
       throw new HiveException("Unkown role operation "
           + operation.getOperationName());
@@ -956,6 +1003,26 @@ private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException
     return 0;
   }
 
+  private String writeHiveRoleGrantInfo(List<HiveRoleGrant> roleGrants, boolean testMode) {
+    if (roleGrants == null || roleGrants.isEmpty()) {
+      return "";
+    }
+    StringBuilder builder = new StringBuilder();
+    // sort the list to get sorted (deterministic) output (for ease of testing)
+    Collections.sort(roleGrants);
+    for (HiveRoleGrant roleGrant : roleGrants) {
+      // schema:
+      // principal_name,principal_type,grant_option,grantor,grantor_type,grant_time
+      appendNonNull(builder, roleGrant.getPrincipalName(), true);
+      appendNonNull(builder, roleGrant.getPrincipalType());
+      appendNonNull(builder, roleGrant.isGrantOption());
+      appendNonNull(builder, roleGrant.getGrantor());
+      appendNonNull(builder, roleGrant.getGrantorType());
+      appendNonNull(builder, testMode ? -1 : roleGrant.getGrantTime() * 1000L);
+    }
+    return builder.toString();
+  }
+
   /**
    * Write list of string entries into given file
    * @param entries
@@ -2652,7 +2719,7 @@ private int showLocksNewFormat(ShowLocksDesc showLocks, HiveLockManager lm)
     } catch (Exception e) {
       throw new HiveException(e.toString());
     } finally {
-      IOUtils.closeStream((FSDataOutputStream) os);
+      IOUtils.closeStream(os);
     }
     return 0;
   }
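One reasoning step worth making explicit: the Collections.sort(roleGrants) call in writeHiveRoleGrantInfo compiles only if HiveRoleGrant implements Comparable. The new HiveRoleGrant.java added at the end of this patch (its body is truncated here) is assumed to provide that. A minimal sketch of the required shape — ordering by principal name, with getters matching those used above; this is illustrative, not the actual class body:

    public class RoleGrantSketch implements Comparable<RoleGrantSketch> {
      private String principalName;
      private String principalType;
      private boolean grantOption;
      private String grantor;
      private String grantorType;
      private int grantTime; // stored in seconds; DDLTask multiplies by 1000 for output

      public String getPrincipalName() { return principalName; }
      public String getPrincipalType() { return principalType; }
      public boolean isGrantOption() { return grantOption; }
      public String getGrantor() { return grantor; }
      public String getGrantorType() { return grantorType; }
      public int getGrantTime() { return grantTime; }

      // natural ordering by principal name yields the deterministic
      // output that the sort in writeHiveRoleGrantInfo is after
      @Override
      public int compareTo(RoleGrantSketch other) {
        return getPrincipalName().compareTo(other.getPrincipalName());
      }
    }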
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 31819d5..e642919 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -70,10 +70,9 @@
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
-import org.apache.hadoop.hive.ql.lockmgr.LockException;
-import org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.authorization.AuthorizationParseUtils;
@@ -442,6 +441,10 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowRoleGrant(ast);
       break;
+    case HiveParser.TOK_SHOW_ROLE_PRINCIPALS:
+      ctx.setResFile(ctx.getLocalTmpPath());
+      analyzeShowRolePrincipals(ast);
+      break;
     case HiveParser.TOK_SHOW_ROLES:
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowRoles(ast);
@@ -553,7 +556,17 @@ private void analyzeShowRoleGrant(ASTNode ast) throws SemanticException {
         createShowRoleGrantTask(ast, ctx.getResFile(), getInputs(), getOutputs());
     if(task != null) {
       rootTasks.add(task);
-      setFetchTask(createFetchTask(RoleDDLDesc.getRoleDescSchema()));
+      setFetchTask(createFetchTask(RoleDDLDesc.getRoleShowGrantSchema()));
+    }
+  }
+
+  private void analyzeShowRolePrincipals(ASTNode ast) throws SemanticException {
+    Task<DDLWork> roleDDLTask = (Task<DDLWork>) hiveAuthorizationTaskFactory
+        .createShowRolePrincipalsTask(ast, ctx.getResFile(), getInputs(), getOutputs());
+
+    if (roleDDLTask != null) {
+      rootTasks.add(roleDDLTask);
+      setFetchTask(createFetchTask(RoleDDLDesc.getShowRolePrincipalsSchema()));
     }
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 3b9dddc..cdfa300 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -288,6 +288,8 @@ KW_INNER: 'INNER';
 KW_EXCHANGE: 'EXCHANGE';
 KW_ADMIN: 'ADMIN';
 KW_OWNER: 'OWNER';
+KW_PRINCIPALS: 'PRINCIPALS';
+
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 9ed8a7f..29b496d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -284,6 +284,7 @@ TOK_REVOKE_ROLE;
 TOK_SHOW_ROLE_GRANT;
 TOK_SHOW_ROLES;
 TOK_SHOW_SET_ROLE;
+TOK_SHOW_ROLE_PRINCIPALS;
 TOK_SHOWINDEXES;
 TOK_SHOWDBLOCKS;
 TOK_INDEXCOMMENT;
@@ -677,6 +678,7 @@ ddlStatement
     | revokePrivileges
     | showGrants
     | showRoleGrants
+    | showRolePrincipals
    | showRoles
     | grantRole
     | revokeRole
@@ -1389,6 +1391,7 @@ showRoleGrants
     -> ^(TOK_SHOW_ROLE_GRANT principalName)
     ;
 
+
 showRoles
 @init {pushMsg("show roles", state);}
 @after {popMsg(state);}
@@ -1417,6 +1420,14 @@ showGrants
     -> ^(TOK_SHOW_GRANT principalName? privilegeIncludeColObject?)
     ;
 
+showRolePrincipals
+@init {pushMsg("show role principals", state);}
+@after {popMsg(state);}
+    : KW_SHOW KW_ROLE KW_PRINCIPALS roleName=identifier
+    -> ^(TOK_SHOW_ROLE_PRINCIPALS $roleName)
+    ;
+
+
 privilegeIncludeColObject
 @init {pushMsg("privilege object including columns", state);}
 @after {popMsg(state);}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 43d1f9b..ff18449 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -544,5 +544,5 @@ functionIdentifier
 
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER
+    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS
     ;
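With the lexer, parser, and nonReserved changes above in place, the statement parses to TOK_SHOW_ROLE_PRINCIPALS and is routed through DDLSemanticAnalyzer. A hedged end-to-end sketch over JDBC/HiveServer2 — the URL, credentials, and role name are illustrative, and the column names follow getShowRolePrincipalsSchema():

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class ShowRolePrincipalsExample {
      public static void main(String[] args) throws Exception {
        Connection conn =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "hive", "");
        Statement stmt = conn.createStatement();
        // requires the V2 authorization path; the V1 path in DDLTask above throws HiveException
        ResultSet rs = stmt.executeQuery("SHOW ROLE PRINCIPALS some_role");
        while (rs.next()) {
          System.out.println(rs.getString("principal_name") + "\t"
              + rs.getString("principal_type") + "\t"
              + rs.getBoolean("grant_option") + "\t"
              + rs.getString("grantor") + "\t"
              + rs.getString("grantor_type") + "\t"
              + rs.getLong("grant_time")); // milliseconds (seconds * 1000 in DDLTask)
        }
        rs.close();
        stmt.close();
        conn.close();
      }
    }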
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java @@ -97,6 +97,7 @@ commandType.put(HiveParser.TOK_REVOKE_ROLE, HiveOperation.REVOKE_ROLE); commandType.put(HiveParser.TOK_SHOW_ROLES, HiveOperation.SHOW_ROLES); commandType.put(HiveParser.TOK_SHOW_SET_ROLE, HiveOperation.SHOW_ROLES); + commandType.put(HiveParser.TOK_SHOW_ROLE_PRINCIPALS, HiveOperation.SHOW_ROLE_PRINCIPALS); commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT); commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, HiveOperation.ALTERDATABASE); commandType.put(HiveParser.TOK_ALTERDATABASE_OWNER, HiveOperation.ALTERDATABASE_OWNER); @@ -212,6 +213,7 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_GRANT_ROLE: case HiveParser.TOK_REVOKE_ROLE: case HiveParser.TOK_SHOW_ROLE_GRANT: + case HiveParser.TOK_SHOW_ROLE_PRINCIPALS: case HiveParser.TOK_SHOW_ROLES: case HiveParser.TOK_ALTERDATABASE_PROPERTIES: case HiveParser.TOK_ALTERDATABASE_OWNER: @@ -219,6 +221,7 @@ public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) case HiveParser.TOK_TRUNCATETABLE: case HiveParser.TOK_EXCHANGEPARTITION: case HiveParser.TOK_SHOW_SET_ROLE: + return new DDLSemanticAnalyzer(conf); case HiveParser.TOK_ALTERTABLE_PARTITION: HiveOperation commandType = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java index bd6ef24..cc84731 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactory.java @@ -62,4 +62,7 @@ public Task createShowCurrentRoleTask(HashSet inputs, HashSet outputs, Path resFile) throws SemanticException; + + public Task createShowRolePrincipalsTask(ASTNode ast, Path resFile, + HashSet inputs, HashSet outputs) throws SemanticException; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index 2c555af..d591163 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; import org.apache.hadoop.hive.ql.plan.RevokeDesc; import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; +import org.apache.hadoop.hive.ql.plan.RoleDDLDesc.RoleOperation; import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry; @@ -130,6 +131,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { principalDesc, userName, PrincipalType.USER, grantOption); return TaskFactory.get(new DDLWork(inputs, outputs, grantDesc), conf); } + @Override public Task createRevokeTask(ASTNode ast, HashSet inputs, HashSet outputs) throws SemanticException { @@ -334,4 +336,21 @@ private String toMessage(ErrorMsg message, Object detail) { ddlDesc.setResFile(resFile.toString()); return TaskFactory.get(new DDLWork(inputs, outputs, ddlDesc), conf); } + + @Override + public Task createShowRolePrincipalsTask(ASTNode ast, Path resFile, + HashSet inputs, HashSet outputs) throws 
SemanticException { + String roleName; + + if (ast.getChildCount() == 1) { + roleName = ast.getChild(0).getText(); + } else { + // the parser should not allow this + throw new AssertionError("Unexpected tokens in SHOW ROLE PRINCIPALS"); + } + + RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, RoleOperation.SHOW_ROLE_PRINCIPALS); + roleDDLDesc.setResFile(resFile.toString()); + return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc), conf); + } }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java index 89c71ea..9b8a8e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java @@ -88,6 +88,7 @@ GRANT_ROLE("GRANT_ROLE", null, null), REVOKE_ROLE("REVOKE_ROLE", null, null), SHOW_ROLES("SHOW_ROLES", null, null), + SHOW_ROLE_PRINCIPALS("SHOW_ROLE_PRINCIPALS", null, null), SHOW_ROLE_GRANT("SHOW_ROLE_GRANT", null, null), ALTERTABLE_PROTECTMODE("ALTERTABLE_PROTECTMODE", new Privilege[]{Privilege.ALTER_METADATA}, null), ALTERPARTITION_PROTECTMODE("ALTERPARTITION_PROTECTMODE", new Privilege[]{Privilege.ALTER_METADATA}, null),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java index e3d2b4a..bc9d47e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/RoleDDLDesc.java @@ -45,23 +45,35 @@ private static final String roleNameSchema = "role#string"; /** - * thrift ddl for the result of show role. + * thrift ddl for the result of show role grant principalName */ - private static final String roleDescSchema = + private static final String roleShowGrantSchema = "role,create_time,principal_name,principal_type,grant_option,grant_time,grantor#" + "string:bigint:string:string:boolean:bigint:string"; + /** + * thrift ddl for the result of show role principals roleName + */ + private static final String roleShowRolePrincipals = + "principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#" + + "string:string:boolean:string:string:bigint"; + public static String getRoleNameSchema() { return roleNameSchema; } - public static String getRoleDescSchema() { - return roleDescSchema; + public static String getRoleShowGrantSchema() { + return roleShowGrantSchema; + } + + public static String getShowRolePrincipalsSchema() { + return roleShowRolePrincipals; } public static enum RoleOperation { DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_role_grant"), - SHOW_ROLES("show_roles"), SET_ROLE("set_role"), SHOW_CURRENT_ROLE("show_current_role"); + SHOW_ROLES("show_roles"), SET_ROLE("set_role"), SHOW_CURRENT_ROLE("show_current_role"), + SHOW_ROLE_PRINCIPALS("show_role_principals"); private String operationName; private RoleOperation() {
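A note on the schema constants above, which the patch renames and extends: the part of the string before the "#" is the comma-separated list of result column names, and the part after it is the colon-separated list of matching thrift types. A minimal, hypothetical sketch of how such a string decomposes (the SchemaStringDemo class and describe helper below are illustrative only, not part of this patch):

import java.util.Arrays;
import java.util.List;

public class SchemaStringDemo {
  // Prints each column name alongside its thrift type for a "names#types" schema string
  static void describe(String schema) {
    String[] parts = schema.split("#");
    List<String> names = Arrays.asList(parts[0].split(","));
    List<String> types = Arrays.asList(parts[1].split(":"));
    for (int i = 0; i < names.size(); i++) {
      System.out.println(names.get(i) + " : " + types.get(i));
    }
  }

  public static void main(String[] args) {
    // same constant as roleShowRolePrincipals above
    describe("principal_name,principal_type,grant_option,grantor,grantor_type,grant_time#"
        + "string:string:boolean:string:string:bigint");
  }
}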
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java index 863da70..50bd592 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java @@ -65,4 +65,6 @@ void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles, boolean void setCurrentRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException; List<HiveRole> getCurrentRoles() throws HiveAuthzPluginException; + + List<HiveRoleGrant> getPrincipalsInRoleInfo(String roleName) throws HiveAuthzPluginException, HiveAccessControlException; }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java index 944985c..48064c4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java @@ -102,6 +102,18 @@ void dropRole(String roleName) List<HiveRole> getRoles(HivePrincipal hivePrincipal) throws HiveAuthzPluginException, HiveAccessControlException; + + /** + * Get the grant information for principals granted the given role + * @param roleName role name + * @return grant information for each principal granted the role + * @throws HiveAuthzPluginException + * @throws HiveAccessControlException + */ + List<HiveRoleGrant> getPrincipalsInRoleInfo(String roleName) + throws HiveAuthzPluginException, HiveAccessControlException; + + /** + * Grant roles in given roles list to principals in given hivePrincipals list + * @param hivePrincipals
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java index 7b37cf9..2577ae5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java @@ -113,8 +113,10 @@ public void setCurrentRole(String roleName) throws HiveAccessControlException, H public List<HiveRole> getCurrentRoles() throws HiveAuthzPluginException { return accessController.getCurrentRoles(); } - // other access control functions -// void validateAuthority(HiveAction, inputs, outputs){ -// authValidator.validateAuthority(HiveAction, inputs, outputs); -// } + + @Override + public List<HiveRoleGrant> getPrincipalsInRoleInfo(String roleName) + throws HiveAuthzPluginException, HiveAccessControlException { + return accessController.getPrincipalsInRoleInfo(roleName); + } }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java index 15465b1..774347d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -91,6 +91,7 @@ REVOKE_ROLE, SHOW_ROLES, SHOW_ROLE_GRANT, + SHOW_ROLE_PRINCIPALS, ALTERTABLE_PROTECTMODE, ALTERPARTITION_PROTECTMODE, ALTERTABLE_FILEFORMAT,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java new file mode 100644 index 0000000..03f129a --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRoleGrant.java @@ -0,0 +1,126 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; +import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant; + +import com.google.common.collect.ComparisonChain; + +/** + * Represents a grant of a role to a principal + */ +@LimitedPrivate(value = { "" }) +@Evolving +public class HiveRoleGrant implements Comparable<HiveRoleGrant> { + + private String roleName; + private String principalName; + private String principalType; + private boolean grantOption; + private int grantTime; + private String grantor; + private String grantorType; + + public HiveRoleGrant() {} + + public HiveRoleGrant(RolePrincipalGrant thriftRoleGrant) { + this.roleName = thriftRoleGrant.getRoleName(); + this.principalName = thriftRoleGrant.getPrincipalName(); + this.principalType = thriftRoleGrant.getPrincipalType().name(); + this.grantOption = thriftRoleGrant.isGrantOption(); + this.grantTime = thriftRoleGrant.getGrantTime(); + this.grantor = thriftRoleGrant.getGrantorName(); + this.grantorType = thriftRoleGrant.getGrantorPrincipalType().name(); + } + + public String getRoleName() { + return roleName; + } + + public void setRoleName(String roleName) { + this.roleName = roleName; + } + + public String getPrincipalName() { + return principalName; + } + + public void setPrincipalName(String principalName) { + this.principalName = principalName; + } + + public String getPrincipalType() { + return principalType; + } + + public void setPrincipalType(String principalType) { + this.principalType = principalType; + } + + public boolean isGrantOption() { + return grantOption; + } + + public void setGrantOption(boolean grantOption) { + this.grantOption = grantOption; + } + + public int getGrantTime() { + return grantTime; + } + + public void setGrantTime(int grantTime) { + this.grantTime = grantTime; + } + + public String getGrantor() { + return grantor; + } + + public void setGrantor(String grantor) { + this.grantor = grantor; + } + + public String getGrantorType() { + return grantorType; + } + + public void setGrantorType(String grantorType) { + this.grantorType = grantorType; + } + + @Override + public int compareTo(HiveRoleGrant other) { + if (other == null) { + return 1; + } + return ComparisonChain.start().compare(roleName, other.roleName) + .compare(principalName, other.principalName) + .compare(principalType, other.principalType) + .compare(grantOption, other.grantOption) + .compare(grantTime, other.grantTime) + .compare(grantor, other.grantor) + .result(); + } + +}
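Because HiveRoleGrant implements Comparable, callers can sort grants into a deterministic order, which is what keeps the golden-file test output below stable. A small hypothetical usage sketch (HiveRoleGrantDemo and toSortedGrants are illustrative names, not code from this patch):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;

public class HiveRoleGrantDemo {
  // Converts thrift grants from the metastore into HiveRoleGrant objects and sorts them
  static List<HiveRoleGrant> toSortedGrants(List<RolePrincipalGrant> thriftGrants) {
    List<HiveRoleGrant> grants = new ArrayList<HiveRoleGrant>();
    for (RolePrincipalGrant g : thriftGrants) {
      grants.add(new HiveRoleGrant(g));
    }
    // compareTo orders by role name, then principal, then the remaining grant details
    Collections.sort(grants);
    return grants;
  }
}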
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index 81f256d..a527be2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -199,6 +199,9 @@ op2Priv.put(HiveOperationType.SHOW_ROLES, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.SHOW_ROLE_GRANT, new InOutPrivs(null, null)); + op2Priv.put(HiveOperationType.SHOW_ROLE_PRINCIPALS, new InOutPrivs(null, + null)); + }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java index c91e6b8..f69e41b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java @@ -28,6 +28,8 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest; +import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse; import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.HiveObjectRef; import org.apache.hadoop.hive.metastore.api.HiveObjectType; @@ -36,6 +38,7 @@ import org.apache.hadoop.hive.metastore.api.PrivilegeBag; import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; @@ -49,6 +52,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant; import org.apache.thrift.TException; /** @@ -371,6 +375,28 @@ public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roleNames } } + + @Override + public List<HiveRoleGrant> getPrincipalsInRoleInfo(String roleName) throws HiveAuthzPluginException, HiveAccessControlException { + // only a user belonging to the admin role can list the principals in a role + if (!isUserAdmin()) { + throw new HiveAccessControlException("Current user : " + currentUserName + " is not" + + " allowed to get principals in a role. 
" + ADMIN_ONLY_MSG); + } + try { + GetPrincipalsInRoleResponse princGrantInfo = + metastoreClientFactory.getHiveMetastoreClient().get_principals_in_role(new GetPrincipalsInRoleRequest(roleName)); + + List hiveRoleGrants = new ArrayList(); + for(RolePrincipalGrant thriftRoleGrant : princGrantInfo.getPrincipalGrants()){ + hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant)); + } + return hiveRoleGrants; + } catch (Exception e) { + throw new HiveAuthzPluginException("Error getting principals for all roles", e); + } + } + @Override public List showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj) throws HiveAuthzPluginException { @@ -511,4 +537,5 @@ private boolean doesUserHasAdminOption(List roleNames) throws HiveAuthzP } return true; } + } diff --git a/ql/src/test/queries/clientnegative/authorization_show_role_principals_no_admin.q b/ql/src/test/queries/clientnegative/authorization_show_role_principals_no_admin.q new file mode 100644 index 0000000..d4ce744 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_show_role_principals_no_admin.q @@ -0,0 +1,3 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +-- This test will fail because hive_test_user is not in admin role +show role principals role1; diff --git a/ql/src/test/queries/clientnegative/authorization_show_role_principals_v1.q b/ql/src/test/queries/clientnegative/authorization_show_role_principals_v1.q new file mode 100644 index 0000000..d563383 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_show_role_principals_v1.q @@ -0,0 +1,2 @@ +-- This test will fail because the command is not currently supported in auth mode v1 +show role principals role1; diff --git a/ql/src/test/queries/clientpositive/authorization_role_grant2.q b/ql/src/test/queries/clientpositive/authorization_role_grant2.q index 04fda84..8fb299e 100644 --- a/ql/src/test/queries/clientpositive/authorization_role_grant2.q +++ b/ql/src/test/queries/clientpositive/authorization_role_grant2.q @@ -1,8 +1,8 @@ set hive.users.in.admin.role=hive_admin_user; set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; -set user.name=hive_admin_user; +set user.name=hive_admin_user; set role ADMIN; ---------------------------------------- @@ -12,10 +12,22 @@ set role ADMIN; create role src_role_wadmin; grant src_role_wadmin to user user2 with admin option; show role grant user user2; +show role principals src_role_wadmin; set user.name=user2; set role src_role_wadmin; grant src_role_wadmin to user user3; show role grant user user3; + +set user.name=hive_admin_user; +set role ADMIN; +show role principals src_role_wadmin; + +set user.name=user2; +set role src_role_wadmin; revoke src_role_wadmin from user user3; show role grant user user3; + +set user.name=hive_admin_user; +set role ADMIN; +show role principals src_role_wadmin; diff --git a/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out b/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out new file mode 100644 index 0000000..2555d67 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out @@ -0,0 +1,4 @@ +PREHOOK: query: -- This test will fail because hive_test_user is not in admin role +show role 
diff --git a/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out b/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out new file mode 100644 index 0000000..2555d67 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_show_role_principals_no_admin.q.out @@ -0,0 +1,4 @@ +PREHOOK: query: -- This test will fail because hive_test_user is not in admin role +show role principals role1 +PREHOOK: type: SHOW_ROLE_PRINCIPALS +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Current user : hive_test_user is not allowed to get principals in a role. User has to belong to ADMIN role and have it as current role, for this action.
diff --git a/ql/src/test/results/clientnegative/authorization_show_role_principals_v1.q.out b/ql/src/test/results/clientnegative/authorization_show_role_principals_v1.q.out new file mode 100644 index 0000000..3627e63 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_show_role_principals_v1.q.out @@ -0,0 +1,5 @@ +PREHOOK: query: -- This test will fail because the command is not currently supported in auth mode v1 +show role principals role1 +PREHOOK: type: SHOW_ROLE_PRINCIPALS +Error in role operation show_role_principals on role name role1, error message Show role principals is not currently supported in authorization mode V1 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
diff --git a/ql/src/test/results/clientpositive/authorization_role_grant2.q.out b/ql/src/test/results/clientpositive/authorization_role_grant2.q.out index 81ef8d5..c1f15f0 100644 --- a/ql/src/test/results/clientpositive/authorization_role_grant2.q.out +++ b/ql/src/test/results/clientpositive/authorization_role_grant2.q.out @@ -24,6 +24,11 @@ POSTHOOK: query: show role grant user user2 POSTHOOK: type: SHOW_ROLE_GRANT PUBLIC -1 false -1 src_role_wadmin -1 user2 USER true -1 hive_admin_user +PREHOOK: query: show role principals src_role_wadmin +PREHOOK: type: SHOW_ROLE_PRINCIPALS +POSTHOOK: query: show role principals src_role_wadmin +POSTHOOK: type: SHOW_ROLE_PRINCIPALS +user2 USER true hive_admin_user USER -1 PREHOOK: query: set role src_role_wadmin PREHOOK: type: SHOW_ROLES POSTHOOK: query: set role src_role_wadmin @@ -38,6 +43,20 @@ POSTHOOK: query: show role grant user user3 POSTHOOK: type: SHOW_ROLE_GRANT PUBLIC -1 false -1 src_role_wadmin -1 user3 USER false -1 user2 +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: show role principals src_role_wadmin +PREHOOK: type: SHOW_ROLE_PRINCIPALS +POSTHOOK: query: show role principals src_role_wadmin +POSTHOOK: type: SHOW_ROLE_PRINCIPALS +user2 USER true hive_admin_user USER -1 +user3 USER false user2 USER -1 +PREHOOK: query: set role src_role_wadmin +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role src_role_wadmin +POSTHOOK: type: SHOW_ROLES PREHOOK: query: revoke src_role_wadmin from user user3 PREHOOK: type: REVOKE_ROLE POSTHOOK: query: revoke src_role_wadmin from user user3 @@ -47,3 +66,12 @@ PREHOOK: type: SHOW_ROLE_GRANT POSTHOOK: query: show role grant user user3 POSTHOOK: type: SHOW_ROLE_GRANT PUBLIC -1 false -1 +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: show role principals src_role_wadmin +PREHOOK: type: SHOW_ROLE_PRINCIPALS +POSTHOOK: query: show role principals src_role_wadmin +POSTHOOK: type: SHOW_ROLE_PRINCIPALS +user2 USER true hive_admin_user USER -1
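For completeness, the same information is reachable one level down, directly over the new metastore RPC, in the same way SQLStdHiveAccessController calls it above. A hedged sketch assuming a connected ThriftHiveMetastore.Iface client (the GetPrincipalsInRoleDemo class and listGrants helper are illustrative, not part of the patch):

import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.TException;

public class GetPrincipalsInRoleDemo {
  // Issues the new get_principals_in_role RPC and prints each principal granted the role
  static void listGrants(ThriftHiveMetastore.Iface client, String roleName) throws TException {
    GetPrincipalsInRoleResponse resp =
        client.get_principals_in_role(new GetPrincipalsInRoleRequest(roleName));
    for (RolePrincipalGrant grant : resp.getPrincipalGrants()) {
      // the roleName inside each grant is redundant: it always matches the requested role
      System.out.println(grant.getPrincipalName() + " (" + grant.getPrincipalType() + ")");
    }
  }
}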