Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1030336) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -306,6 +306,10 @@ HIVEFETCHOUTPUTSERDE("hive.fetch.output.serde", "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"), SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook",null), + + HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false), + HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager", null), + HIVE_AUTHENTICATOR_MANAGER("hive.security.authenticator.manager", null), ; Index: metastore/if/hive_metastore.thrift =================================================================== --- metastore/if/hive_metastore.thrift (revision 1033775) +++ metastore/if/hive_metastore.thrift (working copy) @@ -29,11 +29,24 @@ 4: optional list fields // if the name is one of the user defined types } +struct PrincipalPrivilegeSet { + 1: map userPrivileges, // user name -> privilege set + 2: map groupPrivileges, // group name -> privilege set + 3: map rolePrivileges, //role name -> privilege set +} + +struct Role { + 1: string roleName, + 2: i32 createTime, + 3: string ownerName, +} + // namespace for tables struct Database { 1: string name, 2: string description, 3: string locationUri, + 4: optional PrincipalPrivilegeSet privileges } // This object holds the information needed by SerDes @@ -76,7 +89,8 @@ 9: map parameters, // to store comments or any other user level parameters 10: string viewOriginalText, // original view text, null for non-view 11: string viewExpandedText, // expanded view text, null for non-view - 12: string tableType // table type enum, e.g. EXTERNAL_TABLE + 12: string tableType, // table type enum, e.g. 
EXTERNAL_TABLE + 13: optional PrincipalPrivilegeSet privileges, } struct Partition { @@ -86,7 +100,8 @@ 4: i32 createTime, 5: i32 lastAccessTime, 6: StorageDescriptor sd, - 7: map parameters + 7: map parameters, + 8: optional PrincipalPrivilegeSet privileges } struct Index { @@ -109,6 +124,60 @@ 2: map properties } +struct ColumnPrivilegeBag { + 1: string dbName, + 2: string tableName, + 3: map columnPrivileges +} + +struct PrivilegeBag { + 1: string userPrivileges, //user privileges + 2: map dbPrivileges, //database privileges + 3: map tablePrivileges, //table privileges + 4: map partitionPrivileges, //table privileges + 5: list columnPrivileges, //column privileges +} + +struct SecurityUser { + 1: string principalName, + 2: bool isRole, + 3: bool isGroup, + 4: string privileges, + 5: i32 createTime, + 6: string grantor, +} + +struct SecurityDB { + 1: string principalName, + 2: bool isRole, + 3: bool isGroup, + 4: string privileges, + 5: i32 createTime, + 6: string grantor, + 7: Database db, +} + +struct SecurityTablePartition { + 1: string principalName, + 2: bool isRole, + 3: bool isGroup, + 4: string privileges, + 5: i32 createTime, + 6: string grantor, + 7: Table table, + 8: Partition part, +} + +struct SecurityColumn { + 1: string principalName, + 2: bool isRole, + 3: bool isGroup, + 4: string privileges, + 5: i32 createTime, + 6: string grantor, + 7: Table table, + 8: string column, +} exception MetaException { 1: string message @@ -269,6 +338,44 @@ throws(1:NoSuchObjectException o1, 2:MetaException o2) list get_index_names(1:string db_name, 2:string tbl_name, 3:i16 max_indexes=-1) throws(1:MetaException o2) + + //authorization privileges + PrincipalPrivilegeSet get_user_privilege_set (1: string user_name, 2: list group_names) + throws(1:MetaException o1) + PrincipalPrivilegeSet get_db_privilege_set (1: string db_name, 2: string user_name, 3: list group_names) + throws(1:MetaException o1) + PrincipalPrivilegeSet get_table_privilege_set (1: string db_name, 2: 
string table_name, 3: string user_name, 4: list group_names) + throws(1:MetaException o1) + PrincipalPrivilegeSet get_partition_privilege_set (1: string db_name, 2: string table_name, 3: string part_name, 4: string user_name, 5: list group_names) + throws(1:MetaException o1) + PrincipalPrivilegeSet get_column_privilege_set (1: string db_name, 2: string table_name, 3: string part_name, 4: string column_name, 5: string user_name, 6: list group_names) + throws(1:MetaException o1) + bool create_role(1: string role_name, 2: string owner_name) throws(1:MetaException o1) + + bool drop_role(1: string role_name) throws(1:MetaException o1) + + bool add_role_member (1: string role_name, 2: string user_name, 3: bool is_role, 4: bool is_group) throws(1:MetaException o1) + + bool remove_role_member (1: string role_name, 2: string user_name, 3: bool is_role, 4: bool is_group) throws(1:MetaException o1) + + list list_roles(1: string principal_name, 2: bool is_role, 3: bool is_group) throws(1:MetaException o1) + + list list_security_user_grant(1: string principla_name, 2: bool is_role, 3: bool is_group) throws(1:MetaException o1) + + list list_security_db_grant(1: string principal_name, 2: bool is_group, 3: bool is_role, 4: string db_name) throws(1:MetaException o1) + + list list_security_table_grant(1: string principal_name, 2: bool is_group, 3: bool is_role, 4: string db_name, 5: string table_name) throws(1:MetaException o1) + + list list_security_partition_grant(1: string principal_name, 2: bool is_group, 3: bool is_role, 4: string db_name, 5: string table_name, 6: string part_name) throws(1:MetaException o1) + + list list_security_column_grant(1: string principal_name, 2: bool is_group, 3: bool is_role, 4: string db_name, 5: string table_name, 6: string column_name) throws(1:MetaException o1) + + bool grant_privileges (1: string user_name, 2: bool is_role, 3: bool is_group, 4: PrivilegeBag privileges, 5: string grantor) throws(1:MetaException o1) + + bool revoke_privileges (1: 
string user_name, 2: bool is_role, 3: bool is_group, 4: PrivilegeBag privileges) throws(1:MetaException o1) + + bool revoke_all_privileges (1: string user_name, 2: bool is_role, 3: bool is_group, 4: bool remove_user_priv, 5: list dbs, + 6: list tables, 7: list parts, 8: map> columns) throws(1:MetaException o1) } // * Note about the DDL_TIME: When creating or altering a table or a partition, Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1033775) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -48,12 +48,24 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.Role; +import org.apache.hadoop.hive.metastore.api.SecurityColumn; +import org.apache.hadoop.hive.metastore.api.SecurityDB; +import org.apache.hadoop.hive.metastore.api.SecurityTablePartition; +import org.apache.hadoop.hive.metastore.api.SecurityUser; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore; import org.apache.hadoop.hive.metastore.api.Type; import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; import org.apache.hadoop.hive.metastore.hooks.JDOConnectionURLHook; +import org.apache.hadoop.hive.metastore.model.MSecurityColumn; +import org.apache.hadoop.hive.metastore.model.MSecurityDB; +import org.apache.hadoop.hive.metastore.model.MSecurityTablePartition; +import org.apache.hadoop.hive.metastore.model.MSecurityUser; +import 
org.apache.hadoop.hive.metastore.model.MSecurityUserRoleMap; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; @@ -360,7 +372,7 @@ } catch (NoSuchObjectException e) { ms.createDatabase( new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT, - wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString())); + wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null)); } HMSHandler.createDefaultDB = true; } @@ -2136,6 +2148,493 @@ return ret; } + @Override + public PrincipalPrivilegeSet get_column_privilege_set(final String dbName, + final String tableName, final String partName, final String columnName, + final String userName, final List groupNames) throws MetaException, + TException { + incrementCounter("get_column_privilege_set"); + + PrincipalPrivilegeSet ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + PrincipalPrivilegeSet run(RawStore ms) throws Exception { + return ms.getColumnPrivilegeSet(dbName, tableName, partName, columnName, userName, groupNames); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public PrincipalPrivilegeSet get_db_privilege_set(final String dbName, + final String userName, final List groupNames) throws MetaException, + TException { + incrementCounter("get_db_privilege_set"); + + PrincipalPrivilegeSet ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + PrincipalPrivilegeSet run(RawStore ms) throws Exception { + return ms.getDBPrivilegeSet(dbName, userName, groupNames); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public PrincipalPrivilegeSet get_partition_privilege_set( + final String dbName, final String tableName, final String partName, + final String userName, final List 
groupNames) + throws MetaException, TException { + incrementCounter("get_partition_privilege_set"); + + PrincipalPrivilegeSet ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + PrincipalPrivilegeSet run(RawStore ms) throws Exception { + return ms.getPartitionPrivilegeSet(dbName, tableName, partName, + userName, groupNames); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public PrincipalPrivilegeSet get_table_privilege_set(final String dbName, + final String tableName, final String userName, + final List groupNames) throws MetaException, TException { + incrementCounter("get_table_privilege_set"); + + PrincipalPrivilegeSet ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + PrincipalPrivilegeSet run(RawStore ms) throws Exception { + return ms.getTablePrivilegeSet(dbName, tableName, userName, + groupNames); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean add_role_member(final String roleName, final String userName, + final boolean isRole, final boolean isGroup) throws MetaException, TException { + incrementCounter("add_role_member"); + + Boolean ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + Role role = ms.getRole(roleName); + return ms.addRoleMember(role, userName, isRole, isGroup); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + public List list_roles(final String principalName, + final boolean isRole, final boolean isGroup) throws MetaException, TException { + incrementCounter("list_roles"); + + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + List result = new ArrayList(); 
+ List roleMap = ms.listRoles(principalName, isRole, isGroup); + if (roleMap!=null) { + for (MSecurityUserRoleMap role : roleMap) { + result.add(role.getRole().getRoleName()); + } + } + return result; + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean create_role(final String roleName, final String ownerName) + throws MetaException, TException { + incrementCounter("create_role"); + + Boolean ret = null; + try { + + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + return ms.addRole(roleName, ownerName); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean drop_role(final String roleName) + throws MetaException, TException { + incrementCounter("drop_role"); + + Boolean ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + return ms.removeRole(roleName); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean grant_privileges(final String userName, final boolean isRole, + final boolean isGroup, final PrivilegeBag privileges, final String grantor) throws MetaException, + TException { + incrementCounter("grant_privileges"); + + Boolean ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + return ms.grantPrivileges(userName, isRole, isGroup, privileges, grantor); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean remove_role_member(final String roleName, final String userName, + final boolean isRole, final boolean isGroup) throws MetaException, TException { 
+ incrementCounter("remove_role_member"); + + Boolean ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + Role mRole = ms.getRole(roleName); + return ms.removeRoleMember(mRole, userName, isRole, isGroup); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean revoke_privileges(final String userName, final boolean isRole, + final boolean isGroup, final PrivilegeBag privileges) throws MetaException, + TException { + incrementCounter("revoke_privileges"); + + Boolean ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + return ms.revokePrivileges(userName, isRole, isGroup, privileges); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public PrincipalPrivilegeSet get_user_privilege_set(final String userName, + final List groupNames) throws MetaException, TException { + incrementCounter("get_user_privilege_set"); + + PrincipalPrivilegeSet ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + PrincipalPrivilegeSet run(RawStore ms) throws Exception { + return ms.getUserPrivilegeSet(userName, groupNames); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public boolean revoke_all_privileges(final String userName, + final boolean isRole, final boolean isGroup, + final boolean removeUserPriv, final List dbs, + final List
tables, final List parts, + final Map> columns) throws MetaException, + TException { + incrementCounter("revoke_all_privileges"); + + Boolean ret = null; + try { + ret = executeWithRetry(new Command() { + @Override + Boolean run(RawStore ms) throws Exception { + return ms.revokeAllPrivileges(userName, isRole, isGroup, + removeUserPriv, dbs, tables, parts, columns); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public List list_security_column_grant( + final String principalName, final boolean isGroup, + final boolean isRole, final String dbName, final String tableName, + final String columnName) throws MetaException, TException { + incrementCounter("list_security_column_grant"); + + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + List mCols = ms.listMSecurityPrincipalColumnGrant( + principalName, isGroup, isRole, dbName, tableName, columnName); + Table tbl = ms.getTable(dbName, tableName); + if (mCols.size() > 0) { + List result = new ArrayList(); + for (int i = 0; i < mCols.size(); i++) { + MSecurityColumn sCol = mCols.get(i); + SecurityColumn col = new SecurityColumn( + sCol.getPrincipalName(), sCol.getIsRole(), sCol + .getIsGroup(), sCol.getPrivileges(), sCol + .getCreateTime(), sCol.getGrantor(), tbl, sCol + .getColumnName()); + result.add(col); + } + return result; + } + return null; + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public List list_security_db_grant(final String principalName, + final boolean isGroup, final boolean isRole, final String dbName) + throws MetaException, TException { + incrementCounter("list_security_db_grant"); + + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + List mDbs = 
ms.listMSecurityPrincipalDBGrant( + principalName, isGroup, isRole, dbName); + Database db = ms.getDatabase(dbName); + if (mDbs.size() > 0) { + List result = new ArrayList(); + for (int i = 0; i < mDbs.size(); i++) { + MSecurityDB sDB = mDbs.get(i); + SecurityDB secdb = new SecurityDB(sDB.getPrincipalName(), sDB + .getIsRole(), sDB.getIsGroup(), sDB.getPrivileges(), sDB + .getCreateTime(), sDB.getGrantor(), db); + result.add(secdb); + } + return result; + } + return null; + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public List list_security_partition_grant( + final String principalName, final boolean isGroup, + final boolean isRole, final String dbName, final String tableName, + final String partName) throws MetaException, TException { + incrementCounter("list_security_partition_grant"); + + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + List mParts = ms + .listMSecurityPrincipalPartitionGrant(principalName, isGroup, + isRole, dbName, tableName, partName); + Partition partObj = get_partition_by_name(dbName, tableName, + partName); + if (mParts.size() > 0) { + List result = new ArrayList(); + for (int i = 0; i < mParts.size(); i++) { + MSecurityTablePartition sPart = mParts.get(i); + SecurityTablePartition secPart = new SecurityTablePartition( + sPart.getPrincipalName(), sPart.getIsRole(), sPart + .getIsGroup(), sPart.getPrivileges(), sPart + .getCreateTime(), sPart.getGrantor(), null, partObj); + result.add(secPart); + } + return result; + } + return null; + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public List list_security_table_grant( + final String principalName, final boolean isGroup, + final boolean isRole, final String dbName, final String tableName) + throws MetaException, 
TException { + incrementCounter("list_security_table_grant"); + + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + List mTbls = ms + .listMSecurityPrincipalTableGrant(principalName, isGroup, + isRole, dbName, tableName); + Table tblObj = ms.getTable(dbName, tableName); + if (mTbls.size() > 0) { + List result = new ArrayList(); + for (int i = 0; i < mTbls.size(); i++) { + MSecurityTablePartition sTbl = mTbls.get(i); + SecurityTablePartition secPart = new SecurityTablePartition( + sTbl.getPrincipalName(), sTbl.getIsRole(), sTbl + .getIsGroup(), sTbl.getPrivileges(), sTbl + .getCreateTime(), sTbl.getGrantor(), tblObj, null); + result.add(secPart); + } + return result; + } + return null; + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + + @Override + public List list_security_user_grant( + final String principlaName, final boolean isRole, final boolean isGroup) + throws MetaException, TException { + incrementCounter("list_security_user_grant"); + + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + List mUsers = ms.listMSecurityPrincipalUserGrant( + principlaName, isRole, isGroup); + if (mUsers.size() > 0) { + List result = new ArrayList(); + for (int i = 0; i < mUsers.size(); i++) { + MSecurityUser sUsr = mUsers.get(i); + SecurityUser secUser = new SecurityUser( + sUsr.getPrincipalName(), sUsr.getIsRole(), sUsr + .getIsGroup(), sUsr.getPrivileges(), sUsr + .getCreateTime(), sUsr.getGrantor()); + result.add(secUser); + } + return result; + } + return null; + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + return ret; + } + } /** Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java 
=================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (revision 1030336) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (working copy) @@ -40,6 +40,12 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.SecurityColumn; +import org.apache.hadoop.hive.metastore.api.SecurityDB; +import org.apache.hadoop.hive.metastore.api.SecurityTablePartition; +import org.apache.hadoop.hive.metastore.api.SecurityUser; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore; import org.apache.hadoop.hive.metastore.api.Type; @@ -867,4 +873,132 @@ return client.drop_index_by_name(dbName, tblName, name, deleteData); } + @Override + public boolean add_role_member(String roleName, String userName, + boolean isRole, boolean isGroup) throws MetaException, TException { + return client.add_role_member(roleName, userName, isRole, isGroup); + } + + @Override + public boolean create_role(String roleName, String ownerName) + throws MetaException, TException { + return client.create_role(roleName, ownerName); + } + + @Override + public boolean drop_role(String roleName) throws MetaException, TException { + return client.drop_role(roleName); + } + + @Override + public List list_roles(String principalName, + boolean isRole, boolean isGroup) throws MetaException, TException { + return client.list_roles(principalName, isRole, isGroup); + } + + @Override + public PrincipalPrivilegeSet get_column_privilege_set(String dbName, + String tableName, String partName, String columnName, String userName, + List groupNames) 
throws MetaException, TException { + return client.get_column_privilege_set(dbName, tableName, partName, + columnName, userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet get_db_privilege_set(String dbName, + String userName, List groupNames) throws MetaException, + TException { + return client.get_db_privilege_set(dbName, userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet get_partition_privilege_set(String dbName, + String tableName, String partName, String userName, + List groupNames) throws MetaException, TException { + return client.get_partition_privilege_set(dbName, tableName, partName, + userName, groupNames); + } + + @Override + public PrincipalPrivilegeSet get_table_privilege_set(String dbName, + String tableName, String userName, List groupNames) + throws MetaException, TException { + return client.get_table_privilege_set(dbName, tableName, userName, + groupNames); + } + + @Override + public PrincipalPrivilegeSet get_user_privilege_set(String userName, + List groupNames) throws MetaException, TException { + return client.get_user_privilege_set(userName, groupNames); + } + + @Override + public boolean grant_privileges(String userName, boolean isRole, + boolean isGroup, PrivilegeBag privileges, String grantor) + throws MetaException, TException { + return client.grant_privileges(userName, isRole, isGroup, privileges, + grantor); + } + + @Override + public boolean remove_role_member(String roleName, String userName, + boolean isRole, boolean isGroup) throws MetaException, TException { + return client.remove_role_member(roleName, userName, isRole, isGroup); + } + + @Override + public boolean revoke_all_privileges(String userName, boolean isRole, + boolean isGroup, boolean removeUserPriv, List dbs, + List
tables, List parts, + Map> columns) throws MetaException, TException { + return client.revoke_all_privileges(userName, isRole, isGroup, + removeUserPriv, dbs, tables, parts, columns); + } + + @Override + public boolean revoke_privileges(String userName, boolean isRole, + boolean isGroup, PrivilegeBag privileges) throws MetaException, + TException { + return client.revoke_privileges(userName, isRole, isGroup, privileges); + } + + @Override + public List list_security_column_grant(String principalName, + boolean isGroup, boolean isRole, String dbName, String tableName, + String columnName) throws MetaException, TException { + return client.list_security_column_grant(principalName, isGroup, isRole, + dbName, tableName, columnName); + } + + @Override + public List list_security_db_grant(String principalName, + boolean isGroup, boolean isRole, String dbName) throws MetaException, + TException { + return client + .list_security_db_grant(principalName, isGroup, isRole, dbName); + } + + @Override + public List list_security_partition_grant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName, String partName) throws MetaException, TException { + return client.list_security_partition_grant(principalName, isGroup, isRole, + dbName, tableName, partName); + } + + @Override + public List list_security_table_grant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName) throws MetaException, TException { + return client.list_security_table_grant(principalName, isGroup, isRole, + dbName, tableName); + } + + @Override + public List list_security_user_grant(String principlaName, + boolean isRole, boolean isGroup) throws MetaException, TException { + return client.list_security_user_grant(principlaName, isRole, isGroup); + } + } Index: metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java =================================================================== --- 
metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (revision 1030336) +++ metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (working copy) @@ -31,6 +31,12 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.SecurityColumn; +import org.apache.hadoop.hive.metastore.api.SecurityDB; +import org.apache.hadoop.hive.metastore.api.SecurityTablePartition; +import org.apache.hadoop.hive.metastore.api.SecurityUser; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.UnknownDBException; import org.apache.hadoop.hive.metastore.api.UnknownTableException; @@ -475,4 +481,289 @@ public boolean dropIndex(String db_name, String tbl_name, String name, boolean deleteData) throws NoSuchObjectException, MetaException, TException; + + /** + * @param user_name + * user name + * @param group_names + * group names + * @return + * @throws MetaException + * @throws TException + */ + public PrincipalPrivilegeSet get_user_privilege_set(String user_name, + List group_names) throws MetaException, TException; + + /** + * @param db_name + * database name + * @param user_name + * user name + * @param group_names + * group names + * @return + * @throws MetaException + * @throws TException + */ + public PrincipalPrivilegeSet get_db_privilege_set(String db_name, + String user_name, List group_names) throws MetaException, + TException; + + /** + * @param db_name + * db name + * @param table_name + * table name + * @param user_name + * user name + * @param group_names + * group names + * @return + * @throws MetaException + * @throws TException + */ + public PrincipalPrivilegeSet get_table_privilege_set(String db_name, 
+ String table_name, String user_name, List group_names) + throws MetaException, TException; + + /** + * @param db_name + * db name + * @param table_name + * table name + * @param part_name + * partition name + * @param user_name + * user name + * @param group_names + * group names + * @return + * @throws MetaException + * @throws TException + */ + public PrincipalPrivilegeSet get_partition_privilege_set(String db_name, + String table_name, String part_name, String user_name, + List group_names) throws MetaException, TException; + + /** + * @param db_name + * database name + * @param table_name + * table name + * @param part_name + * partition name + * @param column_name + * column name + * @param user_name + * user name + * @param group_names + * group names + * @return + * @throws MetaException + * @throws TException + */ + public PrincipalPrivilegeSet get_column_privilege_set(String db_name, + String table_name, String part_name, String column_name, + String user_name, List group_names) throws MetaException, + TException; + + /** + * @param role_name + * role name + * @param owner_name + * owner name + * @return + * @throws MetaException + * @throws TException + */ + public boolean create_role(String role_name, String owner_name) + throws MetaException, TException; + + /** + * @param role_name + * role name + * @return + * @throws MetaException + * @throws TException + */ + public boolean drop_role(String role_name) throws MetaException, TException; + + /** + * @param role_name + * role name + * @param user_name + * user name + * @param is_role + * is the given user name a role name + * @param is_group + * is the given user name a group name + * @return + * @throws MetaException + * @throws TException + */ + public boolean add_role_member(String role_name, String user_name, + boolean is_role, boolean is_group) throws MetaException, TException; + + /** + * @param role_name + * role name + * @param user_name + * user name + * @param is_role + * is the given user 
name a role + * @param is_group + * is the given group name a group + * @return + * @throws MetaException + * @throws TException + */ + public boolean remove_role_member(String role_name, String user_name, + boolean is_role, boolean is_group) throws MetaException, TException; + + /** + * @param principalName + * @param isRole + * @param isGroup + * @return + * @throws MetaException + * @throws TException + */ + public List list_roles(String principalName, boolean isRole, + boolean isGroup) throws MetaException, TException; + + /** + * @param user_name + * user name + * @param is_role + * is the given user name a role + * @param is_group + * is the given user name a group + * @param privileges + * a bag of privilege - including user level, db level, table level, + * and column level + * @param grantor + * the name of the grantor + * @return + * @throws MetaException + * @throws TException + */ + public boolean grant_privileges(String user_name, boolean is_role, + boolean is_group, PrivilegeBag privileges, String grantor) + throws MetaException, TException; + + /** + * @param user_name + * user name + * @param is_role + * is the given user name a role + * @param is_group + * is the given user name a group + * @param privileges + * a bag of privileges + * @return + * @throws MetaException + * @throws TException + */ + public boolean revoke_privileges(String user_name, boolean is_role, + boolean is_group, PrivilegeBag privileges) throws MetaException, + TException; + + /** + * @param user_name + * user name + * @param is_role + * is the given user name a role + * @param is_group + * is the given user name a group + * @param remove_user_priv + * true if need to remove all user level privileges that were + * assigned the the given principal + * @param dbs + * a list of database on which all db level privileges for the given + * principal are going to be remove. 
+ * @param tables + * a list of tables on which all table level privileges for the given + * principal are going to be removed. + * @param parts + * a list of partitions on on which all partition level privileges + * for the given principal are going to be removed. + * @param columns + * a list of columns on on which all column level privileges for the + * given principal are going to be removed. + * @return + * @throws MetaException + * @throws TException + */ + public boolean revoke_all_privileges(String user_name, boolean is_role, + boolean is_group, boolean remove_user_priv, List dbs, + List
tables, List parts, + Map> columns) throws MetaException, TException; + + /** + * @param principla_name + * @param is_role + * @param is_group + * @return + * @throws MetaException + * @throws TException + */ + public List list_security_user_grant(String principla_name, + boolean is_role, boolean is_group) throws MetaException, TException; + + /** + * @param principal_name + * @param is_group + * @param is_role + * @param db_name + * @return + * @throws MetaException + * @throws TException + */ + public List list_security_db_grant(String principal_name, + boolean is_group, boolean is_role, String db_name) throws MetaException, + TException; + + /** + * @param principal_name + * @param is_group + * @param is_role + * @param db_name + * @param table_name + * @return + * @throws MetaException + * @throws TException + */ + public List list_security_table_grant( + String principal_name, boolean is_group, boolean is_role, String db_name, + String table_name) throws MetaException, TException; + + /** + * @param principal_name + * @param is_group + * @param is_role + * @param db_name + * @param table_name + * @param part_name + * @return + * @throws MetaException + * @throws TException + */ + public List list_security_partition_grant( + String principal_name, boolean is_group, boolean is_role, String db_name, + String table_name, String part_name) throws MetaException, TException; + + /** + * @param principal_name + * @param is_group + * @param is_role + * @param db_name + * @param table_name + * @param column_name + * @return + * @throws MetaException + * @throws TException + */ + public List list_security_column_grant(String principal_name, + boolean is_group, boolean is_role, String db_name, String table_name, + String column_name) throws MetaException, TException; } Index: metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java =================================================================== --- 
metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (revision 1030336) +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (working copy) @@ -44,7 +44,9 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.ColumnPrivilegeBag; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Index; @@ -53,6 +55,9 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; @@ -62,6 +67,12 @@ import org.apache.hadoop.hive.metastore.model.MIndex; import org.apache.hadoop.hive.metastore.model.MOrder; import org.apache.hadoop.hive.metastore.model.MPartition; +import org.apache.hadoop.hive.metastore.model.MSecurityColumn; +import org.apache.hadoop.hive.metastore.model.MSecurityDB; +import org.apache.hadoop.hive.metastore.model.MSecurityRoleEntity; +import org.apache.hadoop.hive.metastore.model.MSecurityTablePartition; +import org.apache.hadoop.hive.metastore.model.MSecurityUser; +import org.apache.hadoop.hive.metastore.model.MSecurityUserRoleMap; import org.apache.hadoop.hive.metastore.model.MSerDeInfo; import org.apache.hadoop.hive.metastore.model.MStorageDescriptor; import org.apache.hadoop.hive.metastore.model.MTable; @@ -536,6 +547,8 @@ pm.retrieve(tbl); if (tbl != 
null) { // first remove all the partitions + pm.deletePersistentAll(listMSecurityTablePart(dbName, tableName)); + pm.deletePersistentAll(listMSecurityTableColumn(dbName, tableName)); pm.deletePersistentAll(listMPartitions(dbName, tableName, -1)); // then remove the table pm.deletePersistent(tbl); @@ -548,7 +561,7 @@ } return success; } - + public Table getTable(String dbName, String tableName) throws MetaException { boolean commited = false; Table tbl = null; @@ -651,7 +664,7 @@ .getRetention(), convertToStorageDescriptor(mtbl.getSd()), convertToFieldSchemas(mtbl.getPartitionKeys()), mtbl.getParameters(), mtbl.getViewOriginalText(), mtbl.getViewExpandedText(), - tableType); + tableType, null); } private MTable convertToMTable(Table tbl) throws InvalidObjectException, @@ -863,7 +876,7 @@ return new Partition(mpart.getValues(), mpart.getTable().getDatabase() .getName(), mpart.getTable().getTableName(), mpart.getCreateTime(), mpart.getLastAccessTime(), convertToStorageDescriptor(mpart.getSd()), - mpart.getParameters()); + mpart.getParameters(), null); } public boolean dropPartition(String dbName, String tableName, @@ -873,6 +886,13 @@ openTransaction(); MPartition part = getMPartition(dbName, tableName, part_vals); if (part != null) { + List schemas = part.getTable().getPartitionKeys(); + List colNames = new ArrayList(); + for (MFieldSchema col: schemas) { + colNames.add(col.getName()); + } + String partName = FileUtils.makePartName(colNames, part_vals); + pm.deletePersistent(listMSecurityPartition(dbName, tableName, partName)); pm.deletePersistent(part); } success = commitTransaction(); @@ -1395,4 +1415,1195 @@ } return pns; } + + @Override + public boolean addRole(String roleName, String ownerName) throws InvalidObjectException, + MetaException { + boolean success = false; + boolean commited = false; + try { + openTransaction(); + MSecurityRoleEntity nameCheck = this.getMRole(roleName); + if (nameCheck != null) { + throw new RuntimeException("Role " + roleName + " 
already exists."); + } + int now = (int)(System.currentTimeMillis()/1000); + MSecurityRoleEntity mRole = new MSecurityRoleEntity(roleName, now, + ownerName); + pm.makePersistent(mRole); + commited = commitTransaction(); + success = true; + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return success; + } + + @Override + public boolean addRoleMember(Role role, String userName, boolean isRole, + boolean isGroup) throws MetaException, NoSuchObjectException { + boolean success = false; + boolean commited = false; + try { + MSecurityUserRoleMap roleMap = null; + try { + roleMap = this.getMSecurityUserRoleMap(userName, isRole, isGroup, role + .getRoleName()); + } catch (Exception e) { + e.printStackTrace(); + } + if (roleMap != null) { + throw new RuntimeException("Principal " + userName + + " already has the role " + role.getRoleName()); + } + openTransaction(); + MSecurityRoleEntity mRole = getMRole(role.getRoleName()); + long now = System.currentTimeMillis()/1000; + MSecurityUserRoleMap roleMember = new MSecurityUserRoleMap(userName, + isRole, isGroup, mRole, (int) now); + pm.makePersistent(roleMember); + commited = commitTransaction(); + success = true; + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return success; + } + + @Override + public boolean removeRoleMember(Role role, String userName, boolean isRole, + boolean isGroup) throws MetaException, NoSuchObjectException { + boolean success = false; + try { + openTransaction(); + MSecurityUserRoleMap roleMember = getMSecurityUserRoleMap(userName, isRole, isGroup, role.getRoleName()); + pm.deletePersistent(roleMember); + success = commitTransaction(); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return success; + } + + private MSecurityUserRoleMap getMSecurityUserRoleMap(String userName, + boolean isRole, boolean isGroup, String roleName) { + MSecurityUserRoleMap mRoleMember = null; + boolean commited = false; + try { + openTransaction(); + Query query = 
pm.newQuery(MSecurityUserRoleMap.class, "principalName == t1 && isRole==t2 && isGroup == t3 && role.roleName == t4"); + query.declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3, java.lang.String t4"); + query.setUnique(true); + mRoleMember = (MSecurityUserRoleMap) query.executeWithArray(userName, isRole, isGroup, roleName); + pm.retrieve(mRoleMember); + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return mRoleMember; + } + + @Override + public boolean removeRole(String roleName) throws MetaException, + NoSuchObjectException { + boolean success = false; + try { + openTransaction(); + MSecurityRoleEntity mRol = getMRole(roleName); + pm.retrieve(mRol); + if (mRol != null) { + // first remove all the membership, the membership that this role has + // been granted + List roleMap = listMSecurityUserRoleMember(mRol); + if (roleMap.size() > 0) { + pm.deletePersistentAll(roleMap); + } + List roleMember = listMSecurityPrincipalMembershipRole( + mRol.getRoleName(), true, false); + if (roleMember.size() > 0) { + pm.deletePersistentAll(roleMember); + } + // then remove all the grants + List userGrants = listMSecurityPrincipalUserGrant( + mRol.getRoleName(), true, false); + if (userGrants.size() > 0) { + pm.deletePersistentAll(userGrants); + } + List dbGrants = listAllMSecurityPrincipalDBGrant( + mRol.getRoleName(), true, false); + if (dbGrants.size() > 0) { + pm.deletePersistentAll(dbGrants); + } + List tabPartGrants = listAllMSecurityPrincipalTablePartGrant( + mRol.getRoleName(), true, false); + if (tabPartGrants.size() > 0) { + pm.deletePersistentAll(tabPartGrants); + } + List columnGrants = listAllMSecurityPrincipalColumnGrant( + mRol.getRoleName(), true, false); + if (columnGrants.size() > 0) { + pm.deletePersistentAll(columnGrants); + } + // last remove the role + pm.deletePersistent(mRol); + } + success = commitTransaction(); + } finally { + if (!success) { + rollbackTransaction(); + } 
+ } + return success; + } + + private List listRoles(String userName, List groupNames) { + List ret = new ArrayList(); + if(userName != null) { + ret.addAll(listRoles(userName, false, false)); + } + if (groupNames != null) { + for (String groupName: groupNames) { + ret.addAll(listRoles(groupName, false, true)); + } + } + return ret; + } + + @SuppressWarnings("unchecked") + public List listRoles(String principalName, + boolean isRole, boolean isGroup) { + boolean success = false; + List mRoleMember = null; + try { + openTransaction(); + LOG.debug("Executing listRoles"); + Query query = pm.newQuery(MSecurityUserRoleMap.class, + "principalName == t1 && isGroup == t2 && isRole == t3"); + query + .declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3"); + query.setUnique(false); + mRoleMember = (List) query.execute(principalName, + isGroup, isRole); + LOG.debug("Done executing query for listMSecurityUserRoleMap"); + pm.retrieveAll(mRoleMember); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityUserRoleMap"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mRoleMember; + } + + + @SuppressWarnings("unchecked") + private List listMSecurityPrincipalMembershipRole(final String roleName, + final boolean isRole, final boolean isGroup) { + boolean success = false; + List mRoleMemebership = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityPrincipalMembershipRole"); + Query query = pm.newQuery(MSecurityUserRoleMap.class, + "principalName == t1 && isRole == t2 && isGroup == t3"); + query + .declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3"); + mRoleMemebership = (List) query.execute(roleName, + isRole, isGroup); + LOG + .debug("Done executing query for listMSecurityPrincipalMembershipRole"); + pm.retrieveAll(mRoleMemebership); + success = commitTransaction(); + LOG + .debug("Done retrieving all objects for 
listMSecurityPrincipalMembershipRole"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mRoleMemebership; + } + + public Role getRole(String roleName) throws NoSuchObjectException { + MSecurityRoleEntity mRole = this.getMRole(roleName); + if (mRole == null) { + throw new NoSuchObjectException(roleName + " role can not be found."); + } + Role ret = new Role(mRole.getRoleName(), mRole.getCreateTime(), mRole + .getOwnerName()); + return ret; + } + + private MSecurityRoleEntity getMRole(String roleName) { + MSecurityRoleEntity mrole = null; + boolean commited = false; + try { + openTransaction(); + Query query = pm.newQuery(MSecurityRoleEntity.class, "roleName == t1"); + query.declareParameters("java.lang.String t1"); + query.setUnique(true); + mrole = (MSecurityRoleEntity) query.execute(roleName); + pm.retrieve(mrole); + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return mrole; + } + + @Override + public PrincipalPrivilegeSet getUserPrivilegeSet(String userName, + List groupNames) throws InvalidObjectException, MetaException { + boolean commited = false; + PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet(); + try { + openTransaction(); + if (userName != null) { + List user = this.listMSecurityPrincipalUserGrant(userName, false, false); + if(user.size()>0) { + Map userPriv = new HashMap(); + String userPrivStr = user.get(0).getPrivileges(); + for (int i = 1; i < user.size(); i++) { + userPrivStr = userPrivStr + ";" + user.get(i).getPrivileges(); + } + userPriv.put(userName, userPrivStr); + ret.setUserPrivileges(userPriv); + } + } + if (groupNames != null && groupNames.size() > 0) { + Map groupPriv = new HashMap(); + for(String groupName: groupNames) { + List group = this.listMSecurityPrincipalUserGrant(groupName, false, true); + if(group.size()>0) { + String groupPrivStr = group.get(0).getPrivileges(); + for (int i = 1; i < group.size(); i++) { + groupPrivStr = groupPrivStr + ";" + 
group.get(i).getPrivileges(); + } + groupPriv.put(groupName, groupPrivStr); + } + } + ret.setGroupPrivileges(groupPriv); + } + List roles = listRoles(userName, groupNames); + if (roles != null && roles.size() > 0) { + Map rolePrivs = new HashMap(); + for(MSecurityUserRoleMap role: roles) { + List roleUserPrivs = this.listMSecurityPrincipalUserGrant(role.getRole().getRoleName(), true, false); + if(roleUserPrivs.size()>0) { + String rolePrivStr = roleUserPrivs.get(0).getPrivileges(); + for (int i = 1; i < roleUserPrivs.size(); i++) { + rolePrivStr = rolePrivStr + ";" + roleUserPrivs.get(i).getPrivileges(); + } + rolePrivs.put(role.getRole().getRoleName(), rolePrivStr); + } + } + ret.setRolePrivileges(rolePrivs); + } + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return ret; + } + + @Override + public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName, + String userName, List groupNames) throws InvalidObjectException, + MetaException { + boolean commited = false; + PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet(); + try { + openTransaction(); + if (userName != null) { + Map userDbPriv = new HashMap(); + userDbPriv.put(userName, getDBPrivilege(dbName, userName, false, false)); + ret.setUserPrivileges(userDbPriv); + } + if (groupNames != null && groupNames.size() > 0) { + Map groupDbPriv = new HashMap(); + for (String groupName : groupNames) { + groupDbPriv.put(groupName, getDBPrivilege(dbName, groupName, false, + true)); + } + ret.setGroupPrivileges(groupDbPriv); + } + List roles = listRoles(userName, groupNames); + if (roles != null && roles.size() > 0) { + Map roleDbPriv = new HashMap(); + for (MSecurityUserRoleMap role : roles) { + String name = role.getPrincipalName(); + roleDbPriv.put(name, getDBPrivilege(dbName, name, true, false)); + } + ret.setRolePrivileges(roleDbPriv); + } + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return ret; + } + + 
@Override + public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName, + String tableName, String partition, String userName, + List groupNames) throws InvalidObjectException, MetaException { + boolean commited = false; + PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet(); + try { + openTransaction(); + if (userName != null) { + Map userPartitionPriv = new HashMap(); + userPartitionPriv.put(userName, getPartitionPrivilege(dbName, + tableName, partition, userName, false, false)); + ret.setUserPrivileges(userPartitionPriv); + } + if (groupNames != null && groupNames.size() > 0) { + Map groupPartitionPriv = new HashMap(); + for (String groupName : groupNames) { + groupPartitionPriv.put(groupName, getPartitionPrivilege(dbName, tableName, + partition, groupName, false, true)); + } + ret.setGroupPrivileges(groupPartitionPriv); + } + List roles = listRoles(userName, groupNames); + if (roles != null && roles.size() > 0) { + Map rolePartPriv = new HashMap(); + for (MSecurityUserRoleMap role : roles) { + String roleName = role.getPrincipalName(); + rolePartPriv.put(roleName, getPartitionPrivilege(dbName, tableName, + partition, roleName, true, false)); + } + ret.setRolePrivileges(rolePartPriv); + } + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return ret; + } + + @Override + public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName, + String tableName, String userName, List groupNames) + throws InvalidObjectException, MetaException { + boolean commited = false; + PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet(); + try { + openTransaction(); + if (userName != null) { + Map userPartitionPriv = new HashMap(); + userPartitionPriv.put(userName, getTablePrivilege(dbName, + tableName, userName, false, false)); + ret.setUserPrivileges(userPartitionPriv); + } + if (groupNames != null && groupNames.size() > 0) { + Map groupPartitionPriv = new HashMap(); + for (String groupName : groupNames) { + 
groupPartitionPriv.put(groupName, getTablePrivilege(dbName, tableName, + groupName, false, true)); + } + ret.setGroupPrivileges(groupPartitionPriv); + } + List roles = listRoles(userName, groupNames); + if (roles != null && roles.size() > 0) { + Map rolePartPriv = new HashMap(); + for (MSecurityUserRoleMap role : roles) { + String roleName = role.getPrincipalName(); + rolePartPriv.put(roleName, getTablePrivilege(dbName, tableName, + roleName, true, false)); + } + ret.setRolePrivileges(rolePartPriv); + } + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return ret; + } + + @Override + public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName, + String tableName, String partitionName, String columnName, + String userName, List groupNames) throws InvalidObjectException, + MetaException { + boolean commited = false; + PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet(); + try { + openTransaction(); + if (userName != null) { + Map userPartitionPriv = new HashMap(); + userPartitionPriv.put(userName, getColumnPrivilege(dbName, tableName, + columnName, userName, false, false)); + ret.setUserPrivileges(userPartitionPriv); + } + if (groupNames != null && groupNames.size() > 0) { + Map groupPartitionPriv = new HashMap(); + for (String groupName : groupNames) { + groupPartitionPriv.put(groupName, getColumnPrivilege(dbName, tableName,columnName, + groupName, false, true)); + } + ret.setGroupPrivileges(groupPartitionPriv); + } + List roles = listRoles(userName, groupNames); + if (roles != null && roles.size() > 0) { + Map rolePartPriv = new HashMap(); + for (MSecurityUserRoleMap role : roles) { + String roleName = role.getPrincipalName(); + rolePartPriv.put(roleName, getColumnPrivilege(dbName, tableName,columnName, + roleName, true, false)); + } + ret.setRolePrivileges(rolePartPriv); + } + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return ret; + } + + @Override 
+ public boolean grantPrivileges(String userName, boolean isRole, + boolean isGroup, PrivilegeBag privileges, String grantor) + throws InvalidObjectException, MetaException, NoSuchObjectException { + boolean committed = false; + int now = (int)(System.currentTimeMillis()/1000); + try { + openTransaction(); + List persistentObjs = new ArrayList(); + + String userPrivs = privileges.getUserPrivileges(); + if (userPrivs != null) { + MSecurityUser mDb = new MSecurityUser(userName, isRole, isGroup, + userPrivs, now, grantor); + persistentObjs.add(mDb); + } + + Map dbPrivs = privileges.getDbPrivileges(); + if (dbPrivs != null) { + for (Map.Entry db : dbPrivs.entrySet()) { + MDatabase dbObj = getMDatabase(db.getKey().getName()); + if (dbObj != null) { + MSecurityDB mDb = new MSecurityDB(userName, isRole, isGroup, dbObj, + db.getValue(), now, grantor); + persistentObjs.add(mDb); + } + } + } + + Map tablePriv = privileges.getTablePrivileges(); + if (tablePriv != null) { + for (Map.Entry table : tablePriv.entrySet()) { + MTable tblObj = getMTable(table.getKey().getDbName(), table.getKey() + .getTableName()); + if (tblObj != null) { + MSecurityTablePartition mTab = new MSecurityTablePartition( + userName, isRole, isGroup, tblObj, null, table.getValue(), now, + grantor); + persistentObjs.add(mTab); + } + } + } + + Map partitionPriv = privileges.getPartitionPrivileges(); + if (partitionPriv != null) { + for (Map.Entry part : partitionPriv.entrySet()) { + Partition partObj = part.getKey(); + MPartition tblObj = this.getMPartition(partObj.getDbName(), partObj + .getTableName(), partObj.getValues()); + if (tblObj != null) { + MSecurityTablePartition mTab = new MSecurityTablePartition( + userName, isRole, isGroup, tblObj.getTable(), tblObj, part + .getValue(), now, grantor); + persistentObjs.add(mTab); + } + } + } + + List columnPriv = privileges.getColumnPrivileges(); + if (columnPriv != null) { + for (ColumnPrivilegeBag col : columnPriv) { + Map columnMap = 
col.getColumnPrivileges(); + MTable tblObj = getMTable(col.getDbName(), col.getTableName()); + if (columnMap != null && tblObj != null) { + for (Map.Entry colPriv : columnMap.entrySet()) { + MSecurityColumn mCol = new MSecurityColumn(userName, isRole, + isGroup, tblObj, colPriv.getKey(), colPriv.getValue(), now, + grantor); + persistentObjs.add(mCol); + } + } + } + } + if (persistentObjs.size() > 0) { + pm.makePersistentAll(persistentObjs); + } + committed = commitTransaction(); + } finally { + if (!committed) { + rollbackTransaction(); + } + } + return committed; + } + + public boolean revokeAllPrivileges(String userName, boolean isRole, + boolean isGroup, boolean removeUserPriv, List dbs, + List
tables, List parts, + Map> columns) throws MetaException { + boolean committed = false; + try { + openTransaction(); + List persistentObjs = new ArrayList(); + if (removeUserPriv) { + List mSecUser = this.listMSecurityPrincipalUserGrant( + userName, isRole, isGroup); + if (mSecUser != null) { + persistentObjs.addAll(persistentObjs); + } + } + + if (dbs != null) { + for (Database db : dbs) { + List dbGrants = this.listMSecurityPrincipalDBGrant( + userName, isGroup, isRole, db.getName()); + persistentObjs.addAll(dbGrants); + } + } + + if(tables != null) { + for (Table tab : tables) { + List tabGrants = + this.listMSecurityPrincipalTableGrant(userName, isGroup, isRole, + tab.getDbName(), tab.getTableName()); + persistentObjs.addAll(tabGrants); + } + } + + if(parts != null) { + for (Partition part : parts) { + Table tabObj = this.getTable(part.getDbName(), part.getTableName()); + List partGrants = this + .listMSecurityPrincipalPartitionGrant(userName, isGroup, isRole, + part.getDbName(), part.getTableName(), + Warehouse.makePartName(tabObj.getPartitionKeys(), part.getValues())); + persistentObjs.addAll(partGrants); + } + } + + if(columns != null) { + for (Map.Entry> tableColMap : columns.entrySet()){ + Table table = tableColMap.getKey(); + List colList = tableColMap.getValue(); + for(String col : colList) { + List secCol = this.listMSecurityPrincipalColumnGrant(userName, isGroup, isRole, + table.getDbName(), table.getTableName(), col); + persistentObjs.addAll(secCol); + } + } + } + + if (persistentObjs.size() > 0) { + pm.deletePersistentAll(persistentObjs); + } + committed = commitTransaction(); + } finally { + if (!committed) { + rollbackTransaction(); + } + } + return committed; + } + + @Override + public boolean revokePrivileges(String userName, boolean isRole, + boolean isGroup, PrivilegeBag privileges) throws InvalidObjectException, + MetaException { + boolean committed = false; + try { + openTransaction(); + List persistentObjs = new ArrayList(); + String 
userPriv = privileges.getUserPrivileges(); + if (userPriv != null && !userPriv.trim().equals("")) { + List mSecUser = this.listMSecurityPrincipalUserGrant( + userName, isRole, isGroup); + boolean found = false; + if (mSecUser != null) { + String[] userPrivArray = userPriv.split(","); + for (MSecurityUser userGrant : mSecUser) { + String[] userGrantArray = userGrant.getPrivileges().split(","); + if (stringArrayEqualsIgnoreOrder(userPrivArray, userGrantArray)) { + found = true; + persistentObjs.add(userGrant); + break; + } + } + if (!found) { + throw new InvalidObjectException( + "No user grant found for privileges " + userPriv); + } + } + } + + Map dbPrivileges = privileges.getDbPrivileges(); + if (dbPrivileges != null) { + for (Map.Entry dbPriv : dbPrivileges.entrySet()) { + Database db = dbPriv.getKey(); + String dbPrivStr = dbPriv.getValue(); + boolean found = false; + String[] dbPrivArray = dbPrivStr.split(","); + List dbGrants = this.listMSecurityPrincipalDBGrant( + userName, isGroup, isRole, db.getName()); + for (MSecurityDB dbGrant : dbGrants) { + String[] privStrArray = dbGrant.getPrivileges().split(","); + if (stringArrayEqualsIgnoreOrder(dbPrivArray, privStrArray)) { + found = true; + persistentObjs.add(dbGrant); + break; + } + } + if (!found) { + throw new InvalidObjectException( + "No database grant found for privileges " + dbPrivStr + + " on database " + db.getName()); + } + } + } + + Map tablePrivileges = privileges.getTablePrivileges(); + if (tablePrivileges != null) { + for (Map.Entry tabPriv : tablePrivileges.entrySet()) { + Table table = tabPriv.getKey(); + String tblPrivStr = tabPriv.getValue(); + boolean found = false; + String[] tablePrivArray = tblPrivStr.split(","); + List tableGrants = + this.listMSecurityPrincipalTableGrant(userName, isGroup, isRole, + table.getDbName(), table.getTableName()); + for (MSecurityTablePartition tabGrant : tableGrants) { + String[] privStrArray = tabGrant.getPrivileges().split(","); + if 
(stringArrayEqualsIgnoreOrder(privStrArray, tablePrivArray)) { + found = true; + persistentObjs.add(tabGrant); + break; + } + } + if (!found) { + throw new InvalidObjectException("No grant (" + tblPrivStr + + ") found " + " on table " + table.getTableName() + + ", database is " + table.getDbName()); + } + } + } + + Map partPrivileges = privileges + .getPartitionPrivileges(); + if (partPrivileges != null) { + for (Map.Entry partPriv : partPrivileges.entrySet()) { + Partition part = partPriv.getKey(); + String partPrivStr = partPriv.getValue(); + boolean found = false; + String[] partPrivArray = partPrivStr.split(","); + Table tabObj = this.getTable(part.getDbName(), part.getTableName()); + List partitionGrants = + this.listMSecurityPrincipalPartitionGrant(userName, isGroup, isRole, + part.getTableName(), part.getDbName(), + Warehouse.makePartName(tabObj.getPartitionKeys(), part.getValues())); + for (MSecurityTablePartition tabGrant : partitionGrants) { + String[] privStrArray = tabGrant.getPrivileges().split(","); + if (stringArrayEqualsIgnoreOrder(privStrArray, partPrivArray)) { + found = true; + persistentObjs.add(tabGrant); + break; + } + } + if (!found) { + throw new InvalidObjectException("No grant (" + partPrivStr + + ") found " + " on table " + tabObj.getTableName() + + ", database is " + tabObj.getDbName()); + } + } + } + + List columnPrivileges = privileges + .getColumnPrivileges(); + if (columnPrivileges != null) { + for (ColumnPrivilegeBag colPriv : columnPrivileges) { + String dbName = colPriv.getDbName(); + String tabName = colPriv.getTableName(); + Map colPrivMap = colPriv.getColumnPrivileges(); + for (Map.Entry column : colPrivMap.entrySet()) { + List mSecCol = this.listMSecurityPrincipalColumnGrant( + userName, isGroup, isRole, dbName, tabName, column.getKey()); + boolean found = false; + if (mSecCol != null) { + String[] toBeMatched = column.getValue().split(","); + for (MSecurityColumn col : mSecCol) { + String[] candicate = 
col.getPrivileges().split(","); + if (stringArrayEqualsIgnoreOrder(candicate, toBeMatched)) { + found = true; + persistentObjs.add(col); + break; + } + } + if (!found) { + throw new InvalidObjectException("No grant (" + column + + ") found " + " on column " + column.getKey() + " table " + + tabName + ", database is " + dbName); + } + } + } + } + } + if (persistentObjs.size() > 0) { + pm.deletePersistentAll(persistentObjs); + } + committed = commitTransaction(); + } finally { + if (!committed) { + rollbackTransaction(); + } + } + return committed; + } + + private boolean stringArrayEqualsIgnoreOrder(String[] o1, String[] o2) { + if (o1 == o2) { + return true; + } + if (o1 != null && o2 != null) { + for (int i = 0; i < o1.length; i++) { + boolean found = false; + for (int j = 0; j < o2.length; j++) { + if (o1[i].equalsIgnoreCase(o2[j])) { + found = true; + break; + } + } + + if (!found) { + return false; + } + } + return true; + } else { + return false; + } + } + + public String getDBPrivilege(String dbName, + String principalName, boolean isRole, boolean isGroup) + throws InvalidObjectException, MetaException { + String privileges = null; + openTransaction(); + if (principalName != null) { + List userNameDbPriv = this.listMSecurityPrincipalDBGrant( + principalName, isGroup, isRole, dbName); + if (userNameDbPriv != null && userNameDbPriv.size() > 0) { + privileges = userNameDbPriv.get(0).getPrivileges(); + for (int i = 1; i < userNameDbPriv.size(); i++) { + privileges = privileges + "," + + userNameDbPriv.get(i).getPrivileges(); + } + } + } + return privileges; + } + + private String getTablePrivilege(String dbName, String tableName, + String principalName, boolean isRole, boolean isGroup) { + String privileges = null; + if (principalName != null) { + List userNameTabPartPriv = this + .listMSecurityPrincipalTableGrant(principalName, isGroup, isRole, + dbName, tableName); + pm.retrieveAll(userNameTabPartPriv); + if (userNameTabPartPriv != null && 
userNameTabPartPriv.size() > 0) { + privileges = userNameTabPartPriv.get(0).getPrivileges(); + for (int i = 1; i < userNameTabPartPriv.size(); i++) { + privileges = privileges + "," + + userNameTabPartPriv.get(i).getPrivileges(); + } + } + } + return privileges; + } + + private String getPartitionPrivilege(String dbName, String principalName, + String tableName, String partName, boolean isRole, boolean isGroup) { + String privileges = null; + if (principalName != null) { + List userNameTabPartPriv = this + .listMSecurityPrincipalPartitionGrant(principalName, isGroup, isRole, + dbName, tableName, partName); + if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) { + privileges = userNameTabPartPriv.get(0).getPrivileges(); + for (int i = 1; i < userNameTabPartPriv.size(); i++) { + privileges = privileges + "," + + userNameTabPartPriv.get(i).getPrivileges(); + } + } + } + return privileges; + } + + private String getColumnPrivilege(String dbName, String tableName, + String columnName, String principalName, boolean isRole, boolean isGroup) { + String privileges = null; + List userNameTabPartPriv = this + .listMSecurityPrincipalColumnGrant(principalName, isGroup, isRole, + dbName, tableName, columnName); + if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) { + privileges = userNameTabPartPriv.get(0).getPrivileges(); + for (int i = 1; i < userNameTabPartPriv.size(); i++) { + privileges = privileges + "," + + userNameTabPartPriv.get(i).getPrivileges(); + } + } + return privileges; + } + + @SuppressWarnings("unchecked") + private List listMSecurityUserRoleMember( + MSecurityRoleEntity mRol) { + boolean success = false; + List mRoleMemeberList = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityUserRoleMember"); + Query query = pm.newQuery(MSecurityUserRoleMap.class, + "role.roleName == t1"); + query.declareParameters("java.lang.String t1"); + query.setUnique(false); + mRoleMemeberList = (List) query.execute( + 
mRol.getRoleName()); + LOG.debug("Done executing query for listMSecurityUserRoleMember"); + pm.retrieveAll(mRoleMemeberList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityUserRoleMember"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mRoleMemeberList; + } + + @SuppressWarnings("unchecked") + public List listMSecurityPrincipalUserGrant(String principlaName, + boolean isRole, boolean isGroup) { + boolean commited = false; + List userNameDbPriv = null; + try { + openTransaction(); + if (principlaName != null) { + Query query = pm.newQuery(MSecurityUser.class, + "principalName == t1 && isRole == t2 && isGroup== t3"); + query.declareParameters( + "java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3"); + userNameDbPriv = (List) query + .executeWithArray(principlaName, isRole, isGroup); + pm.retrieveAll(userNameDbPriv); + } + commited = commitTransaction(); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + return userNameDbPriv; + } + + @SuppressWarnings("unchecked") + public List listMSecurityPrincipalDBGrant(String principalName, + boolean isGroup, boolean isRole, String dbName) { + boolean success = false; + List mSecurityDBList = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityPrincipalDBGrant"); + Query query = pm.newQuery(MSecurityDB.class, + "principalName == t1 && isGroup == t2 && isRole == t3 && database.name == t4"); + query + .declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3, java.lang.String t4"); + mSecurityDBList = (List) query.executeWithArray(principalName, + isGroup, isRole, dbName); + LOG.debug("Done executing query for listMSecurityPrincipalDBGrant"); + pm.retrieveAll(mSecurityDBList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityPrincipalDBGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityDBList; + } + + 
@SuppressWarnings("unchecked") + private List listAllMSecurityPrincipalDBGrant(String principalName, + boolean isGroup, boolean isRole) { + boolean success = false; + List mSecurityDBList = null; + try { + openTransaction(); + LOG.debug("Executing listAllMSecurityPrincipalDBGrant"); + Query query = pm.newQuery(MSecurityDB.class, + "principalName == t1 && isGroup == t2 && isRole == t3"); + query + .declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3"); + mSecurityDBList = (List) query.execute(principalName, + isGroup, isRole); + LOG.debug("Done executing query for listAllMSecurityPrincipalDBGrant"); + pm.retrieveAll(mSecurityDBList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listAllMSecurityPrincipalDBGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityDBList; + } + + @SuppressWarnings("unchecked") + public List listMSecurityTablePart(String dbName, + String tableName) { + boolean success = false; + List mSecurityTabPartList = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityTable"); + Query query = pm.newQuery( + MSecurityTablePartition.class, + "table.tableName == t1 && table.database.name == t2"); + query.declareParameters( + "java.lang.String t1, java.lang.String t2"); + mSecurityTabPartList = (List) query + .executeWithArray(tableName, dbName); + LOG.debug("Done executing query for listMSecurityTable"); + pm.retrieveAll(mSecurityTabPartList); + success = commitTransaction(); + LOG + .debug("Done retrieving all objects for listMSecurityTable"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityTabPartList; + + } + + @SuppressWarnings("unchecked") + public List listMSecurityPrincipalTableGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName) { + boolean success = false; + List mSecurityTabPartList = null; + try { + openTransaction(); + LOG.debug("Executing 
listMSecurityPrincipalTableGrant"); + Query query = pm.newQuery( + MSecurityTablePartition.class, + "principalName == t1 && isGroup == t2 && isRole == t3 && table.tableName == t4 && table.database.name == t5"); + query.declareParameters( + "java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3, java.lang.String t4, java.lang.String t5"); + mSecurityTabPartList = (List) query + .executeWithArray(principalName, isGroup, isRole, tableName, dbName); + LOG.debug("Done executing query for listMSecurityPrincipalTableGrant"); + pm.retrieveAll(mSecurityTabPartList); + success = commitTransaction(); + LOG + .debug("Done retrieving all objects for listMSecurityPrincipalTableGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityTabPartList; + } + + @SuppressWarnings("unchecked") + private Object listMSecurityPartition(String dbName, String tableName, + String partName) { + boolean success = false; + List mSecurityTabPartList = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityTablePartition"); + Query query = pm.newQuery( + MSecurityTablePartition.class, + "table.tableName == t1 && table.database.name == t2 && partition.partitionName == t3"); + query.declareParameters( + "java.lang.String t1, java.lang.String t2, java.lang.String t3"); + mSecurityTabPartList = (List) query + .executeWithArray(tableName, dbName, partName); + LOG.debug("Done executing query for listMSecurityTablePartition"); + pm.retrieveAll(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityTablePartition"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityTabPartList; + } + + @SuppressWarnings("unchecked") + public List listMSecurityPrincipalPartitionGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName, String partName) { + boolean success = false; + List mSecurityTabPartList = null; + try { + 
openTransaction(); + LOG.debug("Executing listMSecurityPrincipalPartitionGrant"); + Query query = pm.newQuery( + MSecurityTablePartition.class, + "principalName == t1 && isGroup == t2 && isRole == t3 && table.tableName == t4 && table.database.name == t5 && partition.partitionName == t6"); + query.declareParameters( + "java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3, java.lang.String t4, java.lang.String t5, java.lang.String t6"); + mSecurityTabPartList = (List) query + .executeWithArray(principalName, isGroup, isRole, tableName, dbName, partName); + LOG.debug("Done executing query for listMSecurityPrincipalPartitionGrant"); + pm.retrieveAll(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityPrincipalPartitionGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityTabPartList; + } + + @SuppressWarnings("unchecked") + private List listAllMSecurityPrincipalTablePartGrant( + String principalName, boolean isGroup, boolean isRole) { + boolean success = false; + List mSecurityTabPartList = null; + try { + openTransaction(); + LOG.debug("Executing listAllMSecurityPrincipalTablePartGrant"); + Query query = pm.newQuery(MSecurityTablePartition.class, + "principalName == t1 && isGroup == t2 && isRole == t3"); + query + .declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3"); + mSecurityTabPartList = (List) query.execute( + principalName, isGroup, isRole); + LOG.debug("Done executing query for listAllMSecurityPrincipalTablePartGrant"); + pm.retrieveAll(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listAllMSecurityPrincipalTablePartGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityTabPartList; + } + + @SuppressWarnings("unchecked") + public List listMSecurityTableColumn(String dbName, String tableName) { + boolean success = false; + List 
mSecurityColList = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityTableColumn"); + Query query = pm.newQuery( + MSecurityColumn.class, + "table.tableName == t1 && table.database.name == t2"); + query.declareParameters( + "java.lang.String t1, java.lang.String t2"); + mSecurityColList = (List) query + .executeWithArray(tableName, dbName); + LOG.debug("Done executing query for listMSecurityTableColumn"); + pm.retrieveAll(mSecurityColList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityTableColumn"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityColList; + } + + @SuppressWarnings("unchecked") + public List listMSecurityPrincipalColumnGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName, String columnName) { + boolean success = false; + List mSecurityColList = null; + try { + openTransaction(); + LOG.debug("Executing listMSecurityPrincipalColumnGrant"); + Query query = pm.newQuery( + MSecurityColumn.class, + "principalName == t1 && isGroup == t2 && isRole == t3 && table.tableName == t4 && table.database.name == t5 && columnName == t6"); + query.declareParameters( + "java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3, java.lang.String t4, java.lang.String t5, java.lang.String t6"); + mSecurityColList = (List) query + .executeWithArray(principalName, isGroup, isRole, tableName, dbName, + columnName); + LOG.debug("Done executing query for listMSecurityPrincipalColumnGrant"); + pm.retrieveAll(mSecurityColList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listMSecurityPrincipalColumnGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityColList; + } + + @SuppressWarnings("unchecked") + private List listAllMSecurityPrincipalColumnGrant( + String principalName, boolean isGroup, boolean isRole) { + boolean success = false; + List 
mSecurityColumnList = null; + try { + openTransaction(); + LOG.debug("Executing listAllMSecurityPrincipalColumnGrant"); + Query query = pm.newQuery(MSecurityColumn.class, + "principalName == t1 && isGroup == t2 && isRole == t3"); + query + .declareParameters("java.lang.String t1, java.lang.Boolean t2, java.lang.Boolean t3"); + mSecurityColumnList = (List) query.execute( + principalName, isGroup, isRole); + LOG.debug("Done executing query for listAllMSecurityPrincipalColumnGrant"); + pm.retrieveAll(mSecurityColumnList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listAllMSecurityPrincipalColumnGrant"); + } finally { + if (!success) { + rollbackTransaction(); + } + } + return mSecurityColumnList; + } + } Index: metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (revision 1030336) +++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (working copy) @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.metastore; import java.util.List; +import java.util.Map; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hive.metastore.api.Database; @@ -27,8 +28,16 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.Role; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.Type; +import org.apache.hadoop.hive.metastore.model.MSecurityColumn; +import org.apache.hadoop.hive.metastore.model.MSecurityDB; +import org.apache.hadoop.hive.metastore.model.MSecurityTablePartition; +import 
org.apache.hadoop.hive.metastore.model.MSecurityUser; +import org.apache.hadoop.hive.metastore.model.MSecurityUserRoleMap; public interface RawStore extends Configurable { @@ -128,5 +137,64 @@ public abstract List getPartitionsByFilter( String dbName, String tblName, String filter, short maxParts) throws MetaException, NoSuchObjectException; + + public abstract boolean addRole(String rowName, String ownerName) throws InvalidObjectException, MetaException; + + public abstract boolean removeRole(String roleName) throws MetaException, NoSuchObjectException; + + public abstract boolean addRoleMember(Role role, String userName, boolean isRole, boolean isGroup) + throws MetaException, NoSuchObjectException; + + public abstract boolean removeRoleMember(Role role, String userName, boolean isRole, boolean isGroup) + throws MetaException, NoSuchObjectException; + + public abstract PrincipalPrivilegeSet getUserPrivilegeSet(String userName, + List groupNames) throws InvalidObjectException, MetaException; + + public abstract PrincipalPrivilegeSet getDBPrivilegeSet (String dbName, String userName, + List groupNames) throws InvalidObjectException, MetaException; + + public abstract PrincipalPrivilegeSet getTablePrivilegeSet (String dbName, String tableName, + String userName, List groupNames) throws InvalidObjectException, MetaException; + + public abstract PrincipalPrivilegeSet getPartitionPrivilegeSet (String dbName, String tableName, + String partition, String userName, List groupNames) throws InvalidObjectException, MetaException; + + public abstract PrincipalPrivilegeSet getColumnPrivilegeSet (String dbName, String tableName, String partitionName, + String columnName, String userName, List groupNames) throws InvalidObjectException, MetaException; + + public abstract List listMSecurityPrincipalUserGrant(String principlaName, + boolean isRole, boolean isGroup); + + public abstract List listMSecurityPrincipalDBGrant(String principalName, + boolean isGroup, boolean isRole, 
String dbName); + + public abstract List listMSecurityPrincipalTableGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName); + + public abstract List listMSecurityPrincipalPartitionGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName, String partName); + + public abstract List listMSecurityPrincipalColumnGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName, String columnName); + + public abstract boolean grantPrivileges (String userName, boolean isRole, boolean isGroup, PrivilegeBag privileges, String grantor) + throws InvalidObjectException, MetaException, NoSuchObjectException; + + public abstract boolean revokePrivileges (String userName, boolean isRole, boolean isGroup, PrivilegeBag privileges) + throws InvalidObjectException, MetaException; + + public abstract org.apache.hadoop.hive.metastore.api.Role getRole(String roleName) throws NoSuchObjectException; + + public List listRoles(String principalName, + boolean isRole, boolean isGroup); + + public boolean revokeAllPrivileges(String userName, boolean isRole, + boolean isGroup, boolean removeUserPriv, List dbs, + List
tables, List parts, + Map> columns) throws MetaException; } Index: metastore/src/model/package.jdo =================================================================== --- metastore/src/model/package.jdo (revision 1030336) +++ metastore/src/model/package.jdo (working copy) @@ -298,13 +298,13 @@ - + - + @@ -345,5 +345,184 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityColumn.java =================================================================== --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityColumn.java (revision 0) +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityColumn.java (revision 0) @@ -0,0 +1,158 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.metastore.model; + +public class MSecurityColumn { + + private String principalName; + + private boolean isRole; + + private boolean isGroup; + + private MTable table; + + private String columnName; + + private String privileges; + + private int createTime; + + private String grantor; + + public MSecurityColumn() { + } + + public MSecurityColumn(String principalName, boolean isRole, boolean isGroup, + MTable table, String columnName, String privileges, int createTime, + String grantor) { + super(); + this.principalName = principalName; + this.isRole = isRole; + this.isGroup = isGroup; + this.table = table; + this.columnName = columnName; + this.privileges = privileges; + this.createTime = createTime; + this.grantor = grantor; + } + + /** + * @return true if this user name is a role + */ + public boolean getIsRole() { + return isRole; + } + + /** + * @param isRole is this user name a role? + */ + public void setIsRole(boolean isRole) { + this.isRole = isRole; + } + + /** + * @return true if this user name is a group, false else + */ + public boolean getIsGroup() { + return isGroup; + } + + /** + * @param isGroup true if is this user name a group + */ + public void setIsGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + /** + * @return column name + */ + public String getColumnName() { + return columnName; + } + + /** + * @param columnName column name + */ + public void setColumnName(String columnName) { + this.columnName = columnName; + } + + /** + * @return a set of privileges this user/role/group has + */ + public String getPrivileges() { + return privileges; + } + + /** + * @param dbPrivileges a set of privileges this user/role/group has + */ + public void setPrivileges(String dbPrivileges) { + this.privileges = dbPrivileges; + } + + /** + * @return create time + */ + public int getCreateTime() { + return createTime; + } + + /** + * @param createTime create time + */ + public void setCreateTime(int createTime) { + 
this.createTime = createTime; + } + + public String getPrincipalName() { + return principalName; + } + + public void setPrincipalName(String principalName) { + this.principalName = principalName; + } + + public MTable getTable() { + return table; + } + + public void setTable(MTable table) { + this.table = table; + } + + public void setRole(boolean isRole) { + this.isRole = isRole; + } + + public void setGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + public String getGrantor() { + return grantor; + } + + public void setGrantor(String grantor) { + this.grantor = grantor; + } + +} Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityDB.java =================================================================== --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityDB.java (revision 0) +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityDB.java (revision 0) @@ -0,0 +1,140 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.metastore.model; + +public class MSecurityDB { + + private String principalName; + + private boolean isRole; + + private boolean isGroup; + + private MDatabase database; + + private int createTime; + + private String privileges; + + private String grantor; + + public MSecurityDB() { + } + + public MSecurityDB(String principalName, boolean isRole, boolean isGroup, + MDatabase database, String dbPrivileges, int createTime, String grantor) { + super(); + this.principalName = principalName; + this.isRole = isRole; + this.isGroup = isGroup; + this.database = database; + this.privileges = dbPrivileges; + this.createTime = createTime; + this.grantor = grantor; + } + + /** + * @return user name, role name, or group name + */ + public String getPrincipalName() { + return principalName; + } + + /** + * @param userName user/role/group name + */ + public void setPrincipalName(String userName) { + this.principalName = userName; + } + + /** + * @return true if this user name is a role + */ + public boolean getIsRole() { + return isRole; + } + + /** + * @param isRole is this user name a role? 
+ */ + public void setIsRole(boolean isRole) { + this.isRole = isRole; + } + + /** + * @return true if this user name is a group, false else + */ + public boolean getIsGroup() { + return isGroup; + } + + /** + * @param isGroup is this user name a group + */ + public void setIsGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + /** + * @return a set of privileges this user/role/group has + */ + public String getPrivileges() { + return privileges; + } + + /** + * @param dbPrivileges a set of privileges this user/role/group has + */ + public void setPrivileges(String dbPrivileges) { + this.privileges = dbPrivileges; + } + + public MDatabase getDatabase() { + return database; + } + + public void setDatabase(MDatabase database) { + this.database = database; + } + + public void setRole(boolean isRole) { + this.isRole = isRole; + } + + public void setGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + public int getCreateTime() { + return createTime; + } + + public void setCreateTime(int createTime) { + this.createTime = createTime; + } + + public String getGrantor() { + return grantor; + } + + public void setGrantor(String grantor) { + this.grantor = grantor; + } + +} Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityRoleEntity.java =================================================================== --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityRoleEntity.java (revision 0) +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityRoleEntity.java (revision 0) @@ -0,0 +1,75 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.model; + +public class MSecurityRoleEntity { + + private String roleName; + + private int createTime; + + private String ownerName; + + public MSecurityRoleEntity() { + } + + public MSecurityRoleEntity(String roleName, int createTime, String ownerName) { + super(); + this.roleName = roleName; + this.createTime = createTime; + this.ownerName = ownerName; + } + + /** + * @return role name + */ + public String getRoleName() { + return roleName; + } + + /** + * @param roleName + */ + public void setRoleName(String roleName) { + this.roleName = roleName; + } + + /** + * @return create time + */ + public int getCreateTime() { + return createTime; + } + + /** + * @param createTime + */ + public void setCreateTime(int createTime) { + this.createTime = createTime; + } + + public String getOwnerName() { + return ownerName; + } + + public void setOwnerName(String ownerName) { + this.ownerName = ownerName; + } + +} Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityTablePartition.java =================================================================== --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityTablePartition.java (revision 0) +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityTablePartition.java (revision 0) @@ -0,0 +1,158 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.model; + +public class MSecurityTablePartition { + + private String principalName; + + private boolean isRole; + + private boolean isGroup; + + private MTable table; + + private MPartition partition; + + private String privileges; + + private int createTime; + + private String grantor; + + public MSecurityTablePartition() { + } + + public MSecurityTablePartition(String principalName, boolean isRole, + boolean isGroup, MTable table, MPartition partition, String privileges, + int createTime, String grantor) { + super(); + this.principalName = principalName; + this.isRole = isRole; + this.isGroup = isGroup; + this.table = table; + this.partition = partition; + this.privileges = privileges; + this.createTime = createTime; + this.grantor = grantor; + } + + public String getPrincipalName() { + return principalName; + } + + public void setPrincipalName(String principalName) { + this.principalName = principalName; + } + + /** + * @return true if this user name is a role + */ + public boolean getIsRole() { + return isRole; + } + + /** + * @param isRole is this user name a role? 
+ */ + public void setIsRole(boolean isRole) { + this.isRole = isRole; + } + + /** + * @return true if this user name is a group, false else + */ + public boolean getIsGroup() { + return isGroup; + } + + /** + * @param isGroup true if is this user name a group + */ + public void setIsGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + /** + * @return a set of privileges this user/role/group has + */ + public String getPrivileges() { + return privileges; + } + + /** + * @param dbPrivileges a set of privileges this user/role/group has + */ + public void setPrivileges(String dbPrivileges) { + this.privileges = dbPrivileges; + } + + /** + * @return create time + */ + public int getCreateTime() { + return createTime; + } + + /** + * @param createTime create time + */ + public void setCreateTime(int createTime) { + this.createTime = createTime; + } + + /** + * @return + */ + public String getGrantor() { + return grantor; + } + + /** + * @param grantor + */ + public void setGrantor(String grantor) { + this.grantor = grantor; + } + + public void setRole(boolean isRole) { + this.isRole = isRole; + } + + public void setGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + public MTable getTable() { + return table; + } + + public void setTable(MTable table) { + this.table = table; + } + + public MPartition getPartition() { + return partition; + } + + public void setPartition(MPartition partition) { + this.partition = partition; + } + +} Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityUser.java =================================================================== --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityUser.java (revision 0) +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityUser.java (revision 0) @@ -0,0 +1,127 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.model; + +/** + * User global level privileges + */ +public class MSecurityUser { + + private String principalName; + + private boolean isRole; + + private boolean isGroup; + + private String privileges; + + private int createTime; + + private String grantor; + + public MSecurityUser() { + super(); + } + + public MSecurityUser(String userName, boolean isRole, boolean isGroup, + String dbPrivileges, int createTime, String grantor) { + super(); + this.principalName = userName; + this.isRole = isRole; + this.isGroup = isGroup; + this.privileges = dbPrivileges; + this.createTime = createTime; + this.grantor = grantor; + } + + /** + * @return is this user name a role name? + */ + public boolean getIsRole() { + return isRole; + } + + /** + * @param isRole this user name is a role name? + */ + public void setIsRole(boolean isRole) { + this.isRole = isRole; + } + + /** + * @return is this user name a group name? 
+ */ + public boolean getIsGroup() { + return isGroup; + } + + /** + * @param isGroup this user name a group name + */ + public void setIsGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + /** + * @return a set of global privileges granted to this user + */ + public String getPrivileges() { + return privileges; + } + + /** + * @param dbPrivileges set of global privileges to user + */ + public void setPrivileges(String dbPrivileges) { + this.privileges = dbPrivileges; + } + + public void setRole(boolean isRole) { + this.isRole = isRole; + } + + public void setGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + public String getPrincipalName() { + return principalName; + } + + public void setPrincipalName(String principalName) { + this.principalName = principalName; + } + + public int getCreateTime() { + return createTime; + } + + public void setCreateTime(int createTime) { + this.createTime = createTime; + } + + public String getGrantor() { + return grantor; + } + + public void setGrantor(String grantor) { + this.grantor = grantor; + } + +} Index: metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityUserRoleMap.java =================================================================== --- metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityUserRoleMap.java (revision 0) +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MSecurityUserRoleMap.java (revision 0) @@ -0,0 +1,110 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.model; + +public class MSecurityUserRoleMap { + + private String principalName; + + private boolean isRole; + + private boolean isGroup; + + private MSecurityRoleEntity role; + + private int addTime; + + public MSecurityUserRoleMap() { + } + + public MSecurityUserRoleMap(String principalName, boolean isRole, + boolean isGroup, MSecurityRoleEntity role, int addTime) { + super(); + this.principalName = principalName; + this.isRole = isRole; + this.isGroup = isGroup; + this.role = role; + this.addTime = addTime; + } + + /** + * @return principal name + */ + public String getPrincipalName() { + return principalName; + } + + /** + * @param userName principal name + */ + public void setPrincipalName(String userName) { + this.principalName = userName; + } + + /** + * @return is role + */ + public boolean isRole() { + return isRole; + } + + /** + * @param isRole + */ + public void setRole(boolean isRole) { + this.isRole = isRole; + } + + /** + * @return is group + */ + public boolean isGroup() { + return isGroup; + } + + /** + * @param isGroup + */ + public void setGroup(boolean isGroup) { + this.isGroup = isGroup; + } + + /** + * @return add time + */ + public int getAddTime() { + return addTime; + } + + /** + * @param addTime + */ + public void setAddTime(int addTime) { + this.addTime = addTime; + } + + public MSecurityRoleEntity getRole() { + return role; + } + + public void setRole(MSecurityRoleEntity role) { + this.role = role; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
=================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1033775) +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy) @@ -27,6 +27,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -48,6 +49,7 @@ import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.TaskResult; @@ -65,20 +67,27 @@ import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject; import org.apache.hadoop.hive.ql.lockmgr.LockException; import org.apache.hadoop.hive.ql.metadata.DummyPartition; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.ErrorMsg; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl; +import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.ParseDriver; import org.apache.hadoop.hive.ql.parse.ParseException; import org.apache.hadoop.hive.ql.parse.ParseUtils; +import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; +import 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer; import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory; import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; @@ -371,8 +380,18 @@ if (plan.getFetchTask() != null) { plan.getFetchTask().initialize(conf, plan, null); } + + //do the authorization check + if (HiveConf.getBoolVar(conf, + HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) { + boolean pass = doAuthorization(sem); + if (!pass) { + console.printError("Authrization failed (not enough privileges found to run the query.)."); + return (400); + } + } - return (0); + return 0; } catch (SemanticException e) { errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage(); SQLState = ErrorMsg.findSQLState(e.getMessage()); @@ -394,6 +413,127 @@ } } + private boolean doAuthorization(BaseSemanticAnalyzer sem) + throws HiveException { + HashSet inputs = sem.getInputs(); + HashSet outputs = sem.getOutputs(); + SessionState ss = SessionState.get(); + HiveOperation op = ss.getHiveOperation(); + Hive db = sem.getDb(); + boolean pass = true; + if (op != null) { + if (outputs != null && outputs.size() > 0) { + if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)) { + pass = ss.getAuthorizer().authorize( + db.getDatabase(db.getCurrentDatabase()), null, + HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges()); + } else { + for (WriteEntity write : outputs) { + if (write.getPartition() != null) { + pass = pass + && ss.getAuthorizer().authorize(write.getPartition(), null, + op.getOutputRequiredPrivileges()); + } else if (write.getTable() != null) { + pass = pass + && ss.getAuthorizer().authorize(write.getTable(), null, + op.getOutputRequiredPrivileges()); + } + if (!pass) { + break; + } + } + } + } + + if (pass && inputs 
!= null && inputs.size() > 0) { + + Map> tab2Cols = new HashMap>(); + Map> part2Cols = new HashMap>(); + + for (ReadEntity read : inputs) { + boolean part = read.getPartition() != null; + if (part) { + part2Cols.put(read.getPartition(), new ArrayList()); + } else { + tab2Cols.put(read.getTable(), new ArrayList()); + } + } + + if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) + || op.equals(HiveOperation.QUERY)) { + SemanticAnalyzer querySem = (SemanticAnalyzer) sem; + ParseContext parseCtx = querySem.getParseContext(); + Map tsoTopMap = parseCtx.getTopToTable(); + + for (Map.Entry> topOpMap : querySem + .getParseContext().getTopOps().entrySet()) { + Operator topOp = topOpMap.getValue(); + if (topOp instanceof TableScanOperator + && tsoTopMap.containsKey(topOp)) { + TableScanOperator tableScanOp = (TableScanOperator) topOp; + Table tbl = tsoTopMap.get(tableScanOp); + List neededColumnIds = tableScanOp.getNeededColumnIDs(); + List columns = tbl.getCols(); + List cols = new ArrayList(); + if (neededColumnIds != null && neededColumnIds.size() > 0) { + for (int i = 0; i < neededColumnIds.size(); i++) { + cols.add(columns.get(neededColumnIds.get(i)).getName()); + } + } else { + for (int i = 0; i < columns.size(); i++) { + cols.add(columns.get(i).getName()); + } + } + if (tbl.isPartitioned()) { + String alias_id = topOpMap.getKey(); + PrunedPartitionList partsList = PartitionPruner.prune(parseCtx + .getTopToTable().get(topOp), parseCtx.getOpToPartPruner() + .get(topOp), parseCtx.getConf(), alias_id, parseCtx + .getPrunedPartitions()); + Set parts = new HashSet(); + parts.addAll(partsList.getConfirmedPartns()); + parts.addAll(partsList.getUnknownPartns()); + for (Partition part : parts) { + part2Cols.put(part, cols); + } + } else { + tab2Cols.put(tbl, cols); + } + } + } + } + + for (ReadEntity read : inputs) { + if (read.getPartition() != null) { + List cols = part2Cols.get(read.getPartition()); + if (cols != null && cols.size() > 0) { + pass = pass + && 
ss.getAuthorizer().authorize( + read.getPartition().getTable(), read.getPartition(), + cols, op.getInputRequiredPrivileges(), null); + } else { + pass = pass + && ss.getAuthorizer().authorize(read.getPartition(), + op.getInputRequiredPrivileges(), null); + } + } else if (read.getTable() != null) { + List cols = tab2Cols.get(read.getTable()); + if (cols != null && cols.size() > 0) { + pass = pass + && ss.getAuthorizer().authorize(read.getTable(), null, cols, + op.getInputRequiredPrivileges(), null); + } else { + pass = pass + && ss.getAuthorizer().authorize(read.getTable(), + op.getInputRequiredPrivileges(), null); + } + } + } + } + } + return pass; + } + /** * @return The current query plan associated with this Driver, if any. */ @@ -639,7 +779,7 @@ releaseLocks(ctx.getHiveLocks()); return new CommandProcessorResponse(ret, errorMessage, SQLState); } - + ret = acquireReadWriteLocks(); if (ret != 0) { releaseLocks(ctx.getHiveLocks()); Index: ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (working copy) @@ -99,7 +99,12 @@ fetchTask = sem.getFetchTask(); // Note that inputs and outputs can be changed when the query gets executed inputs = sem.getInputs(); - outputs = sem.getOutputs(); + outputs = new HashSet(); + for (WriteEntity write : sem.getOutputs()) { + if (write.isComplete()) { + outputs.add(write); + } + } linfo = sem.getLineageInfo(); idToTableNameMap = new HashMap(sem.getIdToTableNameMap()); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (revision 1033775) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (working copy) @@ -33,14 +33,15 @@ import java.util.ArrayList; import java.util.Collections; import 
java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; +import java.util.Map.Entry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -56,12 +57,18 @@ import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.ColumnPrivilegeBag; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.SecurityColumn; +import org.apache.hadoop.hive.metastore.api.SecurityDB; +import org.apache.hadoop.hive.metastore.api.SecurityTablePartition; +import org.apache.hadoop.hive.metastore.api.SecurityUser; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; @@ -82,7 +89,6 @@ import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.plan.AddPartitionDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc; -import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateIndexDesc; @@ -95,17 +101,27 @@ import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc; import org.apache.hadoop.hive.ql.plan.DropIndexDesc; import 
org.apache.hadoop.hive.ql.plan.DropTableDesc; +import org.apache.hadoop.hive.ql.plan.GrantDesc; +import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL; import org.apache.hadoop.hive.ql.plan.LockTableDesc; import org.apache.hadoop.hive.ql.plan.MsckDesc; +import org.apache.hadoop.hive.ql.plan.PrincipalDesc; +import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; +import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; +import org.apache.hadoop.hive.ql.plan.RevokeDesc; +import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; +import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc; import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc; import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc; import org.apache.hadoop.hive.ql.plan.ShowTablesDesc; import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; +import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; import org.apache.hadoop.hive.ql.plan.api.StageType; +import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe; @@ -116,6 +132,7 @@ import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; /** * DDLTask implementation. 
@@ -280,6 +297,33 @@ if (showParts != null) { return showPartitions(db, showParts); } + + RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc(); + if (roleDDLDesc != null) { + return roleDDL(roleDDLDesc); + } + + GrantDesc grantDesc = work.getGrantDesc(); + if (grantDesc != null) { + return grantOrRevokePrivileges(grantDesc.getPrincipals(), grantDesc.getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), true); + } + + RevokeDesc revokeDesc = work.getRevokeDesc(); + if (revokeDesc != null) { + return grantOrRevokePrivileges(revokeDesc.getPrincipals(), revokeDesc + .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), false); + } + + ShowGrantDesc showGrantDesc = work.getShowGrantDesc(); + if (showGrantDesc != null) { + return showGrants(showGrantDesc); + } + + GrantRevokeRoleDDL grantOrRevokeRoleDDL = work.getGrantRevokeRoleDDL(); + if (grantOrRevokeRoleDDL != null) { + return grantOrRevokeRole(grantOrRevokeRoleDDL); + } + } catch (InvalidTableException e) { console.printError("Table " + e.getTableName() + " does not exist"); @@ -299,6 +343,423 @@ return 0; } + private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL) + throws HiveException { + try { + boolean grantRole = grantOrRevokeRoleDDL.getGrant(); + List principals = grantOrRevokeRoleDDL.getPrincipalDesc(); + List roles = grantOrRevokeRoleDDL.getRoles(); + for (PrincipalDesc principal : principals) { + String userName = principal.getName(); + boolean isRole = principal.getType() == PrincipalDesc.PrincipalType.ROLE; + boolean isGroup = principal.getType() == PrincipalDesc.PrincipalType.GROUP; + for (String roleName : roles) { + if (grantRole) { + db.addRoleMember(roleName, userName, isRole, isGroup); + } else { + db.removeRoleMember(roleName, userName, isRole, isGroup); + } + } + } + } catch (Exception e) { + e.printStackTrace(); + throw new HiveException(e); + } + return 0; + } + + private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException { + try { + Path resFile = new 
Path(showGrantDesc.getResFile()); + FileSystem fs = resFile.getFileSystem(conf); + DataOutput outStream = fs.create(resFile); + PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc(); + boolean isGroup = (principalDesc.getType()== PrincipalDesc.PrincipalType.GROUP); + boolean isRole = (principalDesc.getType()== PrincipalDesc.PrincipalType.ROLE); + PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj(); + String principalName = principalDesc.getName(); + if (hiveObjectDesc == null) { + //show user level privileges + List users = db.showUserLevelGrant(principalName, + isGroup, isRole); + if (users != null && users.size() > 0) { + boolean first = true; + for(SecurityUser usr: users) { + if (!first) { + outStream.write(terminator); + } else { + first = false; + } + outStream.writeBytes("userName:" + principalName); + outStream.write(terminator); + outStream.writeBytes("isRole:" + isRole); + outStream.write(terminator); + outStream.writeBytes("isGroup:" + isGroup); + outStream.write(terminator); + outStream.writeBytes("privileges:" + usr.getPrivileges()); + outStream.write(terminator); + outStream.writeBytes("grantTime:" + usr.getCreateTime()); + outStream.write(terminator); + outStream.writeBytes("grantor:" + usr.getGrantor()); + outStream.write(terminator); + } + } + } else { + String obj = hiveObjectDesc.getObject(); + boolean notFound = true; + String dbName = null; + String tableName = null; + Table tableObj = null; + Database dbObj = null; + + if (hiveObjectDesc.getTable()) { + String[] dbTab = obj.split("\\."); + if (dbTab.length == 2) { + dbName = dbTab[0]; + tableName = dbTab[1]; + } else { + dbName = db.getCurrentDatabase(); + tableName = obj; + } + dbObj = db.getDatabase(dbName); + tableObj = db.getTable(dbName, tableName); + notFound = (dbObj == null || tableObj == null); + } else { + dbName = hiveObjectDesc.getObject(); + dbObj = db.getDatabase(dbName); + notFound = (dbObj == null); + } + if (notFound) { + throw new HiveException(obj + " 
can not be found"); + } + + if (!hiveObjectDesc.getTable()) { + // show database level privileges + List dbs = db.showDBLevelGrant(principalName, isGroup, + isRole, dbName); + if (dbs != null && dbs.size() > 0) { + boolean first = true; + for(SecurityDB db: dbs) { + if (!first) { + outStream.write(terminator); + } else { + first = false; + } + outStream.writeBytes("dbName:" + dbName); + outStream.write(terminator); + outStream.writeBytes("userName:" + principalName); + outStream.write(terminator); + outStream.writeBytes("isRole:" + isRole); + outStream.write(terminator); + outStream.writeBytes("isGroup:" + isGroup); + outStream.write(terminator); + outStream.writeBytes("privileges:" + db.getPrivileges()); + outStream.write(terminator); + outStream.writeBytes("grantTime:" + db.getCreateTime()); + outStream.write(terminator); + outStream.writeBytes("grantor:" + db.getGrantor()); + outStream.write(terminator); + } + } + + } else { + if (hiveObjectDesc.getPartSpec() != null) { + // show partition level privileges + List parts = db.showPartitionGrant(principalName, + isGroup, isRole, dbName, tableName, Warehouse.makePartName(hiveObjectDesc.getPartSpec(), false)); + if (parts != null && parts.size() > 0) { + boolean first = true; + for(SecurityTablePartition part: parts) { + if (!first) { + outStream.write(terminator); + } else { + first = false; + } + outStream.writeBytes("dbName:" + dbName); + outStream.write(terminator); + outStream.writeBytes("tableName:" + tableName); + outStream.write(terminator); + outStream.writeBytes("partName:" + + Warehouse.makePartName(part.getTable().getPartitionKeys(), part + .getPart().getValues())); + outStream.write(terminator); + outStream.writeBytes("userName:" + principalName); + outStream.write(terminator); + outStream.writeBytes("isRole:" + isRole); + outStream.write(terminator); + outStream.writeBytes("isGroup:" + isGroup); + outStream.write(terminator); + outStream.writeBytes("privileges:" + part.getPrivileges()); + 
outStream.write(terminator); + outStream.writeBytes("grantTime:" + part.getCreateTime()); + outStream.write(terminator); + outStream.writeBytes("grantor:" + part.getGrantor()); + outStream.write(terminator); + } + } + } else if (showGrantDesc.getColumns() != null) { + // show column level privileges + for (String columnName : showGrantDesc.getColumns()) { + List columnss = db.showColumnGrant(principalName, + isGroup, isRole, dbName, tableName, columnName); + if (columnss != null && columnss.size() > 0) { + boolean first = true; + for (SecurityColumn col : columnss) { + if (!first) { + outStream.write(terminator); + } else { + first = false; + } + outStream.writeBytes("dbName:" + dbName); + outStream.write(terminator); + outStream.writeBytes("tableName:" + tableName); + outStream.write(terminator); + outStream.writeBytes("columnName:" + columnName); + outStream.write(terminator); + outStream.writeBytes("userName:" + principalName); + outStream.write(terminator); + outStream.writeBytes("isRole:" + isRole); + outStream.write(terminator); + outStream.writeBytes("isGroup:" + isGroup); + outStream.write(terminator); + outStream.writeBytes("privileges:" + col.getPrivileges()); + outStream.write(terminator); + outStream.writeBytes("grantTime:" + col.getCreateTime()); + outStream.write(terminator); + outStream.writeBytes("grantor:" + col.getGrantor()); + outStream.write(terminator); + } + } + } + } else { + // show table level privileges + List tbls = db.showTableLevelGrant( + principalName, isGroup, isRole, dbName, tableName); + if (tbls!= null && tbls.size() > 0) { + boolean first = true; + for(SecurityTablePartition tbl: tbls) { + if (!first) { + outStream.write(terminator); + } else { + first = false; + } + outStream.writeBytes("dbName:" + dbName); + outStream.write(terminator); + outStream.writeBytes("tableName:" + tableName); + outStream.write(terminator); + outStream.writeBytes("userName:" + principalName); + outStream.write(terminator); + 
outStream.writeBytes("isRole:" + isRole); + outStream.write(terminator); + outStream.writeBytes("isGroup:" + isGroup); + outStream.write(terminator); + outStream.writeBytes("privileges:" + tbl.getPrivileges()); + outStream.write(terminator); + outStream.writeBytes("grantTime:" + tbl.getCreateTime()); + outStream.write(terminator); + outStream.writeBytes("grantor:" + tbl.getGrantor()); + outStream.write(terminator); + } + } + } + } + } + ((FSDataOutputStream) outStream).close(); + } catch (FileNotFoundException e) { + LOG.info("show table status: " + stringifyException(e)); + return 1; + } catch (IOException e) { + LOG.info("show table status: " + stringifyException(e)); + return 1; + } catch (Exception e) { + e.printStackTrace(); + throw new HiveException(e); + } + return 0; + } + + private int grantOrRevokePrivileges(List principals, + List privileges, PrivilegeObjectDesc privSubjectDesc, + boolean grant) { + if (privileges == null || privileges.size() == 0) { + console.printError("No privilege found."); + return 1; + } + + String dbName = null; + String tableName = null; + Table tableObj = null; + Database dbObj = null; + + try { + + if (privSubjectDesc != null) { + if (privSubjectDesc.getPartSpec() != null && grant) { + throw new HiveException("Grant does not support partition level."); + } + String obj = privSubjectDesc.getObject(); + boolean notFound = true; + if (privSubjectDesc.getTable()) { + String[] dbTab = obj.split("\\."); + if (dbTab.length == 2) { + dbName = dbTab[0]; + tableName = dbTab[1]; + } else { + dbName = db.getCurrentDatabase(); + tableName = obj; + } + dbObj = db.getDatabase(dbName); + tableObj = db.getTable(dbName, tableName); + notFound = (dbObj == null || tableObj == null); + } else { + dbName = privSubjectDesc.getObject(); + dbObj = db.getDatabase(dbName); + notFound = (dbObj == null); + } + if (notFound) { + throw new HiveException(obj + " can not be found"); + } + } + + PrivilegeBag privBag = new PrivilegeBag(); + String userPrivs = 
""; + if (privSubjectDesc == null) { + boolean first = true; + for (int idx = 0; idx < privileges.size(); idx++) { + Privilege priv = privileges.get(idx).getPrivilege(); + if (privileges.get(idx).getColumns() != null + && privileges.get(idx).getColumns().size() > 0) { + throw new HiveException( + "For user-level privielges, column sets should be null. columns=" + + privileges.get(idx).getColumns().toString()); + } + if (!first) { + userPrivs = userPrivs + StringUtils.COMMA; + } else { + first = false; + } + userPrivs = userPrivs + priv.getPriv(); + } + privBag.setUserPrivileges(userPrivs); + } else { + List columnPrivBags = new ArrayList(); + Map dbPrivs = new HashMap(); + Map tabPrivs = new HashMap(); + Map partPrivs = new HashMap(); + org.apache.hadoop.hive.metastore.api.Partition partObj = null; + if (privSubjectDesc.getPartSpec() != null) { + partObj = db.getPartition(tableObj, privSubjectDesc.getPartSpec(), false).getTPartition(); + } + + for (PrivilegeDesc privDesc : privileges) { + List columns = privDesc.getColumns(); + Privilege priv = privDesc.getPrivilege(); + if (columns != null && columns.size() > 0) { + if (!priv.supportColumnLevel()) { + throw new HiveException(priv.getPriv() + + " does not support column level."); + } + if (privSubjectDesc == null || tableName == null + || (privSubjectDesc.getPartSpec() != null)) { + throw new HiveException( + "For user-level/database-level/partition privielges, column sets should be null. 
columns=" + + columns); + } + Map columnPrivileges = new HashMap(); + for (int i = 0; i < columns.size(); i++) { + columnPrivileges.put(columns.get(i), priv.getPriv()); + } + ColumnPrivilegeBag columnPrivBag = new ColumnPrivilegeBag(dbName, + tableName, columnPrivileges); + columnPrivBags.add(columnPrivBag); + } else { + if (privSubjectDesc.getTable()) { + if (privSubjectDesc.getPartSpec() != null && !grant) { + partPrivs.put(partObj, priv.getPriv()); + } else { + tabPrivs.put(tableObj.getTTable(), priv.getPriv()); + } + } else { + dbPrivs.put(dbObj, priv.getPriv()); + } + } + } + + if (columnPrivBags.size() > 0) { + privBag.setColumnPrivileges(columnPrivBags); + } + if (tabPrivs.size() > 0) { + privBag.setTablePrivileges(tabPrivs); + } + + if (partPrivs != null && partPrivs.size() > 0) { + privBag.setPartitionPrivileges(partPrivs); + } + if (dbPrivs.size() > 0) { + privBag.setDbPrivileges(dbPrivs); + } + } + + for (PrincipalDesc principal : principals) { + boolean isRole = (principal.getType() == PrincipalDesc.PrincipalType.ROLE); + boolean isGroup = (principal.getType() == PrincipalDesc.PrincipalType.GROUP); + if (grant) { + db + .grantPrivileges(principal.getName(), isRole, isGroup, privBag, + ""); + } else { + db + .revokePrivileges(principal.getName(), isRole, isGroup, privBag, + ""); + } + } + } catch (HiveException e) { + e.printStackTrace(); + console.printError("Error: " + e.getMessage()); + return 1; + } + + return 0; + } + + private int roleDDL(RoleDDLDesc roleDDLDesc) { + RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation(); + try { + if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) { + db.createRole(roleDDLDesc.getName()); + } else if (operation.equals(RoleDDLDesc.RoleOperation.DROP_ROLE)) { + db.dropRole(roleDDLDesc.getName()); + } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT)) { + List roles = db.showRoleGrant(roleDDLDesc.getName(), + roleDDLDesc.getRole(), roleDDLDesc.getGroup()); + if (roles != null && 
roles.size() > 0) { + Path resFile = new Path(roleDDLDesc.getResFile()); + FileSystem fs = resFile.getFileSystem(conf); + DataOutput outStream = fs.create(resFile); + for (String role : roles) { + outStream.writeBytes(role); + outStream.write(terminator); + } + ((FSDataOutputStream) outStream).close(); + } + } else { + throw new HiveException("Unkown role operation " + + operation.getOperationName()); + } + } catch (HiveException e) { + e.printStackTrace(); + console.printError("Error in role operation " + + operation.getOperationName() + " on role name " + + roleDDLDesc.getName() + ", error message " + e.getMessage()); + return 1; + } catch (IOException e) { + LOG.info("role ddl exception: " + stringifyException(e)); + return 1; + } + + return 0; + } + private int dropIndex(Hive db, DropIndexDesc dropIdx) throws HiveException { db.dropIndex(db.getCurrentDatabase(), dropIdx.getTableName(), dropIdx.getIndexName(), true); @@ -356,7 +817,7 @@ Partition part = db .getPartition(tbl, addPartitionDesc.getPartSpec(), false); - work.getOutputs().add(new WriteEntity(part)); + work.getOutputs().add(new WriteEntity(part, true)); return 0; } @@ -387,7 +848,7 @@ throw new HiveException("Uable to update table"); } work.getInputs().add(new ReadEntity(tbl)); - work.getOutputs().add(new WriteEntity(tbl)); + work.getOutputs().add(new WriteEntity(tbl, true)); } else { Partition part = db.getPartition(tbl, touchDesc.getPartSpec(), false); if (part == null) { @@ -399,7 +860,7 @@ throw new HiveException(e); } work.getInputs().add(new ReadEntity(part)); - work.getOutputs().add(new WriteEntity(part)); + work.getOutputs().add(new WriteEntity(part, true)); } return 0; } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy) @@ -174,7 +174,7 @@ .getTableName(), 
tbd.getReplace(), new Path(tbd.getTmpDir()), tbd.getHoldDDLTime()); if (work.getOutputs() != null) { - work.getOutputs().add(new WriteEntity(table)); + work.getOutputs().add(new WriteEntity(table, true)); } } else { LOG.info("Partition is: " + tbd.getPartitionSpec().toString()); @@ -210,7 +210,7 @@ for (LinkedHashMap partSpec: dp) { Partition partn = db.getPartition(table, partSpec, false); - WriteEntity enty = new WriteEntity(partn); + WriteEntity enty = new WriteEntity(partn, true); if (work.getOutputs() != null) { work.getOutputs().add(enty); } @@ -243,7 +243,7 @@ dc = new DataContainer(table.getTTable(), partn.getTPartition()); // add this partition to post-execution hook if (work.getOutputs() != null) { - work.getOutputs().add(new WriteEntity(partn)); + work.getOutputs().add(new WriteEntity(partn, true)); } } } Index: ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (working copy) @@ -57,6 +57,8 @@ * The directory if this is a directory. 
*/ private String d; + + private boolean complete; /** * This is derived from t and p, but we need to serialize this field to make sure @@ -122,6 +124,20 @@ this.t = t; typ = Type.TABLE; name = computeName(); + this.complete = true; + } + + /** + * @param t + * @param complete + */ + public WriteEntity(Table t, boolean complete) { + d = null; + p = null; + this.t = t; + typ = Type.TABLE; + name = computeName(); + this.complete = complete; } /** @@ -136,6 +152,20 @@ t = p.getTable(); typ = Type.PARTITION; name = computeName(); + this.complete = true; + } + + /** + * @param p + * @param complete + */ + public WriteEntity(Partition p, boolean complete) { + d = null; + this.p = p; + t = p.getTable(); + typ = Type.PARTITION; + name = computeName(); + this.complete = complete; } /** @@ -241,5 +271,14 @@ public int hashCode() { return toString().hashCode(); } + + public boolean isComplete() { + return complete; + } + + public void setComplete(boolean complete) { + this.complete = complete; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (revision 1033775) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (working copy) @@ -58,11 +58,19 @@ import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.PrivilegeBag; +import org.apache.hadoop.hive.metastore.api.SecurityColumn; +import 
org.apache.hadoop.hive.metastore.api.SecurityDB; +import org.apache.hadoop.hive.metastore.api.SecurityTablePartition; +import org.apache.hadoop.hive.metastore.api.SecurityUser; import org.apache.hadoop.hive.metastore.api.SerDeInfo; +import org.apache.hadoop.hive.metastore.model.MSecurityUser; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; import org.apache.hadoop.hive.serde2.Deserializer; @@ -736,7 +744,7 @@ } catch (NoSuchObjectException e) { if (throwException) { LOG.error(StringUtils.stringifyException(e)); - throw new InvalidTableException("Table not found ", tableName); + throw new InvalidTableException("Table " + tableName + " not found ", tableName); } return null; } catch (Exception e) { @@ -876,6 +884,52 @@ } } + public boolean grantPrivileges(String userName, boolean isRole, + boolean isGroup, PrivilegeBag privileges, String grantor) + throws HiveException { + try { + return getMSC().grant_privileges(userName, isRole, isGroup, privileges, + grantor); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param userName + * principal name + * @param isRole + * is the given principal name a role + * @param isGroup + * is the given principal name a group + * @param privileges + * a bag of privileges + * @return + * @throws HiveException + */ + public boolean revokePrivileges(String userName, boolean isRole, + boolean isGroup, PrivilegeBag privileges, String grantor) + throws HiveException { + try { + return getMSC().revoke_privileges(userName, isRole, isGroup, privileges); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param dbName + * @return + * @throws HiveException + */ + public Database getDatabase(String dbName) throws HiveException { + try { + return getMSC().getDatabase(dbName); + } catch (Exception e) { + throw new HiveException(e); + } + } + /** * Query metadata to see if a database with the given name already exists. 
* @@ -1363,6 +1417,216 @@ public void setCurrentDatabase(String currentDatabase) { this.currentDatabase = currentDatabase; } + + public void createRole(String roleName) throws HiveException { + try { + getMSC().create_role(roleName, ""); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public void dropRole(String roleName) throws HiveException { + try { + getMSC().drop_role(roleName); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List showRoleGrant(String principalName, boolean isRole, + boolean isGroup) throws HiveException { + try { + return getMSC().list_roles(principalName, isRole, isGroup); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public boolean addRoleMember(String roleName, String userName, + boolean isRole, boolean isGroup) throws HiveException { + try { + return getMSC().add_role_member(roleName, userName, isRole, isGroup); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public boolean removeRoleMember(String roleName, String userName, + boolean isRole, boolean isGroup) throws HiveException { + try { + return getMSC().remove_role_member(roleName, userName, isRole, isGroup); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List listRoles(String userName, boolean isRole, boolean isGroup) + throws HiveException { + try { + return getMSC().list_roles(userName, isRole, isGroup); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List showUserLevelGrant(String principalName, + boolean isRole, boolean isGroup) throws HiveException { + try { + return getMSC().list_security_user_grant(principalName, isRole, isGroup); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param user_name + * user name + * @param group_names + * group names + * @return + */ + public PrincipalPrivilegeSet get_user_privilege_set(String user_name, + List group_names) throws HiveException { + try { + return 
getMSC().get_user_privilege_set(user_name, group_names); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param db_name + * database name + * @param user_name + * user name + * @param group_names + * group names + * @return + */ + public PrincipalPrivilegeSet get_db_privilege_set(String db_name, + String user_name, List group_names) throws HiveException { + try { + return getMSC().get_db_privilege_set(db_name, user_name, group_names); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param db_name + * db name + * @param table_name + * table name + * @param user_name + * user name + * @param group_names + * group names + * @return + */ + public PrincipalPrivilegeSet get_table_privilege_set(String db_name, + String table_name, String user_name, List group_names) + throws HiveException{ + try { + return getMSC().get_table_privilege_set(db_name, table_name, user_name, + group_names); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param db_name + * db name + * @param table_name + * table name + * @param part_name + * partition name + * @param user_name + * user name + * @param group_names + * group names + * @return + */ + public PrincipalPrivilegeSet get_partition_privilege_set(String db_name, + String table_name, String part_name, String user_name, + List group_names) throws HiveException { + try { + return getMSC().get_partition_privilege_set(db_name, table_name, part_name, user_name, + group_names); + } catch (Exception e) { + throw new HiveException(e); + } + } + + /** + * @param db_name + * database name + * @param table_name + * table name + * @param part_name + * partition name + * @param column_name + * column name + * @param user_name + * user name + * @param group_names + * group names + * @return + */ + public PrincipalPrivilegeSet get_column_privilege_set(String db_name, + String table_name, String part_name, String column_name, + String user_name, List group_names) throws 
HiveException { + try { + return getMSC().get_column_privilege_set(db_name, table_name, part_name, + column_name, user_name, group_names); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List showDBLevelGrant(String principalName, + boolean isGroup, boolean isRole, String dbName) throws HiveException { + try { + return getMSC().list_security_db_grant(principalName, isGroup, isRole, + dbName); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List showTableLevelGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName) throws HiveException { + try { + return getMSC().list_security_table_grant(principalName, isGroup, + isRole, dbName, tableName); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List showPartitionGrant( + String principalName, boolean isGroup, boolean isRole, String dbName, + String tableName, String partName) throws HiveException { + try { + return getMSC().list_security_partition_grant(principalName, isGroup, + isRole, dbName, tableName, partName); + } catch (Exception e) { + throw new HiveException(e); + } + } + + public List showColumnGrant(String principal_name, + boolean isGroup, boolean isRole, String dbName, String tableName, + String columnName) throws HiveException { + try { + return getMSC().list_security_column_grant(principal_name, isGroup, + isRole, dbName, tableName, columnName); + } catch (Exception e) { + throw new HiveException(e); + } + } static private void checkPaths(FileSystem fs, FileStatus[] srcs, Path destf, boolean replace) throws HiveException { Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (revision 1033775) +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (working copy) @@ -28,7 +28,6 @@ import 
org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; -import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.UnionOperator; @@ -36,6 +35,7 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.parse.ParseContext; +import org.apache.hadoop.hive.ql.plan.HiveQueryReadWrite; import org.apache.hadoop.hive.ql.plan.MapJoinDesc; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -242,6 +242,7 @@ * to the hooks. */ private Set outputs; + private HiveQueryReadWrite hiveInAndOut; public GenMRProcContext() { } @@ -265,6 +266,7 @@ * the set of input tables/partitions generated by the walk * @param outputs * the set of destinations generated by the walk + * @param hiveInAndOut */ public GenMRProcContext( HiveConf conf, @@ -273,7 +275,7 @@ List> mvTask, List> rootTasks, LinkedHashMap, GenMapRedCtx> mapCurrCtx, - Set inputs, Set outputs) { + Set inputs, Set outputs, HiveQueryReadWrite hiveInAndOut) { this.conf = conf; this.opTaskMap = opTaskMap; this.seenOps = seenOps; @@ -292,6 +294,7 @@ rootOps.addAll(parseCtx.getTopOps().values()); unionTaskMap = new HashMap(); mapJoinTaskMap = new HashMap, GenMRMapJoinCtx>(); + this.hiveInAndOut = hiveInAndOut; } /** @@ -493,11 +496,13 @@ unionTaskMap.put(op, uTask); } - public GenMRMapJoinCtx getMapJoinCtx(AbstractMapJoinOperator op) { + public GenMRMapJoinCtx getMapJoinCtx( + AbstractMapJoinOperator op) { return mapJoinTaskMap.get(op); } - public void setMapJoinCtx(AbstractMapJoinOperator op, GenMRMapJoinCtx mjCtx) { + public void setMapJoinCtx(AbstractMapJoinOperator op, + GenMRMapJoinCtx mjCtx) { mapJoinTaskMap.put(op, mjCtx); } @@ -529,4 +534,12 @@ public void setConf(HiveConf conf) { this.conf = conf; } + + public 
HiveQueryReadWrite getHiveInAndOut() { + return hiveInAndOut; + } + + public void setHiveInAndOut(HiveQueryReadWrite hiveInAndOut) { + this.hiveInAndOut = hiveInAndOut; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy) @@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.plan.HiveQueryReadWrite; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe; @@ -71,6 +72,8 @@ protected Context ctx; protected HashMap idToTableNameMap; + + protected HiveQueryReadWrite hiveInAndOut; public static int HIVE_COLUMN_ORDER_ASC = 1; public static int HIVE_COLUMN_ORDER_DESC = 0; @@ -222,6 +225,7 @@ idToTableNameMap = new HashMap(); inputs = new LinkedHashSet(); outputs = new LinkedHashSet(); + hiveInAndOut = new HiveQueryReadWrite(); } catch (Exception e) { throw new SemanticException(e); } @@ -719,4 +723,8 @@ } return partSpec; } + + public Hive getDb() { + return db; + } } Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1033775) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -68,6 +68,13 @@ import org.apache.hadoop.hive.ql.plan.AlterTableSimpleDesc; import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc; import org.apache.hadoop.hive.ql.plan.CreateIndexDesc; +import 
org.apache.hadoop.hive.ql.plan.GrantDesc; +import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL; +import org.apache.hadoop.hive.ql.plan.PrincipalDesc; +import org.apache.hadoop.hive.ql.plan.PrivilegeDesc; +import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc; +import org.apache.hadoop.hive.ql.plan.RevokeDesc; +import org.apache.hadoop.hive.ql.plan.RoleDDLDesc; import org.apache.hadoop.hive.ql.plan.DDLWork; import org.apache.hadoop.hive.ql.plan.DescFunctionDesc; import org.apache.hadoop.hive.ql.plan.DescTableDesc; @@ -79,6 +86,7 @@ import org.apache.hadoop.hive.ql.plan.MsckDesc; import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc; import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc; +import org.apache.hadoop.hive.ql.plan.ShowGrantDesc; import org.apache.hadoop.hive.ql.plan.ShowLocksDesc; import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc; import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc; @@ -87,6 +95,8 @@ import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes; +import org.apache.hadoop.hive.ql.security.authorization.Privilege; +import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry; import org.apache.hadoop.hive.serde.Constants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.mapred.TextInputFormat; @@ -242,11 +252,224 @@ analyzeDropDatabase(ast); } else if (ast.getToken().getType() == TOK_SWITCHDATABASE) { analyzeSwitchDatabase(ast); + } else if (ast.getToken().getType() == HiveParser.TOK_CREATEROLE) { + analyzeCreateRole(ast); + } else if (ast.getToken().getType() == HiveParser.TOK_DROPROLE) { + analyzeDropRole(ast); + } else if (ast.getToken().getType() == HiveParser.TOK_SHOW_ROLE_GRANT) { + ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); + analyzeShowRoleGrant(ast); + } else if (ast.getToken().getType() == HiveParser.TOK_GRANT_ROLE) { + 
anaylzeGrantRevokeRole(true, ast); + } else if (ast.getToken().getType() == HiveParser.TOK_REVOKE_ROLE) { + anaylzeGrantRevokeRole(false, ast); + } else if (ast.getToken().getType() == HiveParser.TOK_GRANT) { + analyzeGrant(ast); + } else if (ast.getToken().getType() == HiveParser.TOK_SHOW_GRANT) { + ctx.setResFile(new Path(ctx.getLocalTmpFileURI())); + analyzeShowGrant(ast); + } else if (ast.getToken().getType() == HiveParser.TOK_REVOKE) { + analyzeRevoke(ast); } else { throw new SemanticException("Unsupported command."); } } + private void anaylzeGrantRevokeRole(boolean grant, ASTNode ast) { + List principalDesc = analyzePrinciplaListDef( + (ASTNode) ast.getChild(0)); + List roles = new ArrayList(); + for (int i = 1; i < ast.getChildCount(); i++) { + roles.add(unescapeIdentifier(ast.getChild(i).getText())); + } + + GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(grant, roles, principalDesc); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + grantRevokeRoleDDL), conf)); + } + + private void analyzeShowGrant(ASTNode ast) throws SemanticException { + PrivilegeObjectDesc privHiveObj = null; + + ASTNode principal = (ASTNode) ast.getChild(0); + PrincipalDesc.PrincipalType type = null; + switch (principal.getType()) { + case HiveParser.TOK_USER: + type = PrincipalDesc.PrincipalType.USER; + break; + case HiveParser.TOK_GROUP: + type = PrincipalDesc.PrincipalType.GROUP; + break; + case HiveParser.TOK_ROLE: + type = PrincipalDesc.PrincipalType.ROLE; + break; + } + String principlaName = unescapeIdentifier(principal.getChild(0).getText()); + PrincipalDesc principalDesc = new PrincipalDesc(principlaName, type); + List cols = null; + if (ast.getChildCount() > 1) { + for (int i = 1; i < ast.getChildCount(); i++) { + ASTNode child = (ASTNode) ast.getChild(i); + if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT) { + privHiveObj = analyzePrivilegeObject(child); + } else if (child.getToken().getType() == HiveParser.TOK_TABCOLNAME) { 
+ cols = getColumnNames((ASTNode) child); + } + } + } + + if (privHiveObj == null && cols != null) { + throw new SemanticException( + "For user-level privielges, column sets should be null. columns=" + + cols.toString()); + } + + ShowGrantDesc showGrant = new ShowGrantDesc(ctx.getResFile().toString(), + principalDesc, privHiveObj, cols); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + showGrant), conf)); + } + + private void analyzeGrant(ASTNode ast) throws SemanticException { + List privilegeDesc = analyzePrivilegeListDef( + (ASTNode) ast.getChild(0)); + List principalDesc = analyzePrinciplaListDef( + (ASTNode) ast.getChild(1)); + boolean grantOption = false; + PrivilegeObjectDesc subjectObj = null; + + if (ast.getChildCount() > 2) { + for (int i = 2; i < ast.getChildCount(); i++) { + ASTNode astChild = (ASTNode) ast.getChild(i); + if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) { + grantOption = true; + } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) { + subjectObj = analyzePrivilegeObject(astChild); + } + } + } + + GrantDesc grantDesc = new GrantDesc(subjectObj, privilegeDesc, principalDesc, grantOption); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + grantDesc), conf)); + } + + private void analyzeRevoke(ASTNode ast) throws SemanticException { + List privilegeDesc = analyzePrivilegeListDef( + (ASTNode) ast.getChild(0)); + List principalDesc = analyzePrinciplaListDef( + (ASTNode) ast.getChild(1)); + PrivilegeObjectDesc hiveObj = null; + if (ast.getChildCount() > 2) { + ASTNode astChild = (ASTNode) ast.getChild(2); + hiveObj = analyzePrivilegeObject(astChild); + } + + RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, hiveObj); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + revokeDesc), conf)); + } + + + private PrivilegeObjectDesc analyzePrivilegeObject(ASTNode ast) + throws SemanticException { + PrivilegeObjectDesc subject = new 
PrivilegeObjectDesc(); + subject.setTable(ast.getChild(0) != null); + subject.setObject(unescapeIdentifier(ast.getChild(1).getText())); + if (ast.getChildCount() > 2) { + ASTNode astChild = (ASTNode) ast.getChild(2); + if (astChild.getToken().getType() == HiveParser.TOK_PARTSPEC) { + subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(astChild)); + } + } + return subject; + } + + private List analyzePrinciplaListDef(ASTNode node) { + List principalList = new ArrayList(); + + for (int i = 0; i < node.getChildCount(); i++) { + ASTNode child = (ASTNode) node.getChild(i); + PrincipalDesc.PrincipalType type = null; + switch (child.getType()) { + case HiveParser.TOK_USER: + type = PrincipalDesc.PrincipalType.USER; + break; + case HiveParser.TOK_GROUP: + type = PrincipalDesc.PrincipalType.GROUP; + break; + case HiveParser.TOK_ROLE: + type = PrincipalDesc.PrincipalType.ROLE; + break; + } + String principlaName = unescapeIdentifier(child.getChild(0).getText()); + PrincipalDesc principalDesc = new PrincipalDesc(principlaName, type); + principalList.add(principalDesc); + } + + return principalList; + } + + private List analyzePrivilegeListDef(ASTNode node) + throws SemanticException { + List ret = new ArrayList(); + for (int i = 0; i < node.getChildCount(); i++) { + ASTNode privilegeDef = (ASTNode) node.getChild(i); + + String privilegeStr = unescapeIdentifier(privilegeDef.getChild(0) + .getText()); + Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeStr); + if (privObj == null) { + throw new SemanticException("undefined privilege " + privilegeStr); + } + List cols = null; + if (privilegeDef.getChildCount() > 1) { + cols = getColumnNames((ASTNode) privilegeDef.getChild(1)); + } + PrivilegeDesc privilegeDesc = new PrivilegeDesc(privObj, cols); + ret.add(privilegeDesc); + } + return ret; + } + + private void analyzeCreateRole(ASTNode ast) { + String roleName = unescapeIdentifier(ast.getChild(0).getText()); + RoleDDLDesc createRoleDesc = new RoleDDLDesc(roleName, + 
RoleDDLDesc.RoleOperation.CREATE_ROLE); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + createRoleDesc), conf)); + } + + private void analyzeDropRole(ASTNode ast) { + String roleName = unescapeIdentifier(ast.getChild(0).getText()); + RoleDDLDesc createRoleDesc = new RoleDDLDesc(roleName, + RoleDDLDesc.RoleOperation.DROP_ROLE); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + createRoleDesc), conf)); + } + + private void analyzeShowRoleGrant(ASTNode ast) { + ASTNode child = (ASTNode) ast.getChild(0); + boolean isRole = false; + boolean isGroup = false; + switch (child.getType()) { + case HiveParser.TOK_USER: + break; + case HiveParser.TOK_GROUP: + isGroup = true; + break; + case HiveParser.TOK_ROLE: + isRole = true; + break; + } + String principalName = unescapeIdentifier(child.getChild(0).getText()); + RoleDDLDesc createRoleDesc = new RoleDDLDesc(principalName, isRole, isGroup, + RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT); + createRoleDesc.setResFile(ctx.getResFile().toString()); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), + createRoleDesc), conf)); + } + private void analyzeCreateDatabase(ASTNode ast) throws SemanticException { String dbName = unescapeIdentifier(ast.getChild(0).getText()); boolean ifNotExists = false; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (revision 1031914) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (working copy) @@ -188,6 +188,22 @@ TOK_LATERAL_VIEW; TOK_TABALIAS; TOK_ANALYZE; +TOK_CREATEROLE; +TOK_DROPROLE; +TOK_GRANT; +TOK_REVOKE; +TOK_SHOW_GRANT; +TOK_PRIVILEGE_LIST; +TOK_PRIVILEGE; +TOK_PRINCIPAL_NAME; +TOK_USER; +TOK_GROUP; +TOK_ROLE; +TOK_GRANT_WITH_OPTION; +TOK_PRIV_OBJECT; +TOK_GRANT_ROLE; +TOK_REVOKE_ROLE; +TOK_SHOW_ROLE_GRANT; } @@ -258,6 +274,14 @@ | analyzeStatement | lockStatement | unlockStatement + | 
createRoleStatement + | dropRoleStatement + | grantPrivileges + | revokePrivileges + | showGrants + | showRoleGrants + | grantRole + | revokeRole ; ifExists @@ -681,6 +705,107 @@ : KW_UNLOCK KW_TABLE Identifier partitionSpec? -> ^(TOK_UNLOCKTABLE Identifier partitionSpec?) ; +createRoleStatement +@init { msgs.push("create role"); } +@after { msgs.pop(); } + : KW_CREATE KW_ROLE roleName=Identifier + -> ^(TOK_CREATEROLE $roleName) + ; + +dropRoleStatement +@init {msgs.push("drop role");} +@after {msgs.pop();} + : KW_DROP KW_ROLE roleName=Identifier + -> ^(TOK_DROPROLE $roleName) + ; + +grantPrivileges +@init {msgs.push("grant privileges");} +@after {msgs.pop();} + : KW_GRANT privList=privilegeList + privilegeObject? + KW_TO principalSpecification + (KW_WITH withOption)? + -> ^(TOK_GRANT $privList principalSpecification privilegeObject? withOption?) + ; + +revokePrivileges +@init {msgs.push("revoke privileges");} +@after {msgs.pop();} + : KW_REVOKE privilegeList privilegeObject? KW_FROM principalSpecification + -> ^(TOK_REVOKE privilegeList principalSpecification privilegeObject?) + ; + +grantRole +@init {msgs.push("grant role");} +@after {msgs.pop();} + : KW_GRANT KW_ROLE Identifier (COMMA Identifier)* KW_TO principalSpecification + -> ^(TOK_GRANT_ROLE principalSpecification Identifier+) + ; + +revokeRole +@init {msgs.push("grant role");} +@after {msgs.pop();} + : KW_REVOKE KW_ROLE Identifier (COMMA Identifier)* KW_FROM principalSpecification + -> ^(TOK_REVOKE_ROLE principalSpecification Identifier+) + ; + +showRoleGrants +@init {msgs.push("show grants");} +@after {msgs.pop();} + : KW_SHOW KW_ROLE KW_GRANT principalName + -> ^(TOK_SHOW_ROLE_GRANT principalName) + ; + +showGrants +@init {msgs.push("show grants");} +@after {msgs.pop();} + : KW_SHOW KW_GRANT principalName privilegeObject? (LPAREN cols=columnNameList RPAREN)? + -> ^(TOK_SHOW_GRANT principalName privilegeObject? $cols?) 
+ ; + +privilegeObject +@init {msgs.push("privilege subject");} +@after {msgs.pop();} + : KW_ON (table=KW_TABLE|KW_DATABASE) Identifier partitionSpec? + -> ^(TOK_PRIV_OBJECT $table Identifier partitionSpec?) + ; + +privilegeList +@init {msgs.push("grant privilege list");} +@after {msgs.pop();} + : privlegeDef (COMMA privlegeDef)* + -> ^(TOK_PRIVILEGE_LIST privlegeDef+) + ; + +privlegeDef +@init {msgs.push("grant privilege");} +@after {msgs.pop();} + : Identifier (LPAREN cols=columnNameList RPAREN)? + -> ^(TOK_PRIVILEGE Identifier $cols?) + ; + +principalSpecification +@init { msgs.push("user/group/role name list"); } +@after { msgs.pop(); } + : principalName (COMMA principalName)* -> ^(TOK_PRINCIPAL_NAME principalName+) + ; + +principalName +@init {msgs.push("user|group|role name");} +@after {msgs.pop();} + : KW_USER Identifier -> ^(TOK_USER Identifier) + | KW_GROUP Identifier -> ^(TOK_GROUP Identifier) + | KW_ROLE Identifier -> ^(TOK_ROLE Identifier) + ; + +withOption +@init {msgs.push("grant with option");} +@after {msgs.pop();} + : KW_GRANT KW_OPTION + -> ^(TOK_GRANT_WITH_OPTION) + ; + metastoreCheck @init { msgs.push("metastore check statement"); } @after { msgs.pop(); } @@ -1923,6 +2048,10 @@ KW_COMPUTE: 'COMPUTE'; KW_STATISTICS: 'STATISTICS'; KW_USE: 'USE'; +KW_USER: 'USER'; +KW_ROLE: 'ROLE'; +KW_OPTION: 'OPTION'; + // Operators // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work. 
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1031676) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy) @@ -18,8 +18,6 @@ package org.apache.hadoop.hive.ql.parse; -import static org.apache.hadoop.util.StringUtils.stringifyException; - import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; @@ -124,6 +122,7 @@ import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.ForwardDesc; import org.apache.hadoop.hive.ql.plan.GroupByDesc; +import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.plan.JoinCondDesc; import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.LateralViewForwardDesc; @@ -887,7 +886,7 @@ } catch (HiveException e) { // Has to use full name to make sure it does not conflict with // org.apache.commons.lang.StringUtils - LOG.error(stringifyException(e)); + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e); } } @@ -923,7 +922,7 @@ // an old SQL construct which has been eliminated in a later Hive // version, so we need to provide full debugging info to help // with fixing the view definition. - LOG.error(stringifyException(e)); + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); StringBuilder sb = new StringBuilder(); sb.append(e.getMessage()); ErrorMsg.renderOrigin(sb, viewOrigin); @@ -3508,8 +3507,7 @@ // Here only register the whole table for post-exec hook if no DP present // in the case of DP, we will register WriteEntity in MoveTask when the // list of dynamically created partitions are known. 
- if ((dpCtx == null || dpCtx.getNumDPCols() == 0) && - !outputs.add(new WriteEntity(dest_tab))) { + if (!outputs.add(new WriteEntity(dest_tab, false))) { throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES .getMsg(dest_tab.getTableName())); } @@ -3556,7 +3554,7 @@ ltd.setHoldDDLTime(true); } loadTableWork.add(ltd); - if (!outputs.add(new WriteEntity(dest_part))) { + if (!outputs.add(new WriteEntity(dest_part, true))) { throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES .getMsg(dest_tab.getTableName() + "@" + dest_part.getName())); } @@ -5844,7 +5842,7 @@ tsDesc.setStatsAggPrefix(k); // set up WritenEntity for replication - outputs.add(new WriteEntity(tab)); + outputs.add(new WriteEntity(tab, true)); // add WriteEntity for each matching partition if (tab.isPartitioned()) { @@ -5855,7 +5853,7 @@ if (partitions != null) { for (Partition partn : partitions) { // inputs.add(new ReadEntity(partn)); // is this needed at all? - outputs.add(new WriteEntity(partn)); + outputs.add(new WriteEntity(partn, true)); } } } @@ -6112,7 +6110,7 @@ } catch (HiveException e) { // Has to use full name to make sure it does not conflict with // org.apache.commons.lang.StringUtils - LOG.error(stringifyException(e)); + LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e); } @@ -6212,7 +6210,7 @@ new ArrayList>(), getParseContext(), mvTask, rootTasks, new LinkedHashMap, GenMapRedCtx>(), - inputs, outputs); + inputs, outputs, hiveInAndOut); // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. @@ -7028,6 +7026,9 @@ storageFormat.storageHandler, shared.serdeProps, tblProps, ifNotExists); validateCreateTable(crtTblDesc); + // outputs is empty, which means this create table happens in the current + // database. 
+ SessionState.get().setCommandType(HiveOperation.CREATETABLE); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblDesc), conf)); break; @@ -7035,6 +7036,7 @@ case CTLT: // create table like CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt, location, ifNotExists, likeTableName); + SessionState.get().setCommandType(HiveOperation.CREATETABLE); rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), crtTblLikeDesc), conf)); break; @@ -7059,6 +7061,8 @@ tblProps, ifNotExists); qb.setTableDesc(crtTblDesc); + SessionState.get().setCommandType(HiveOperation.CREATETABLE_AS_SELECT); + return selectStmt; default: throw new SemanticException("Unrecognized command."); Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (working copy) @@ -21,6 +21,7 @@ import java.util.HashMap; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.session.SessionState; /** @@ -29,60 +30,71 @@ */ public final class SemanticAnalyzerFactory { - static HashMap commandType = new HashMap(); - static HashMap tablePartitionCommandType = new HashMap(); + static HashMap commandType = new HashMap(); + static HashMap tablePartitionCommandType = new HashMap(); static { - commandType.put(HiveParser.TOK_EXPLAIN, "EXPLAIN"); - commandType.put(HiveParser.TOK_LOAD, "LOAD"); - commandType.put(HiveParser.TOK_CREATEDATABASE, "CREATEDATABASE"); - commandType.put(HiveParser.TOK_DROPDATABASE, "DROPDATABASE"); - commandType.put(HiveParser.TOK_SWITCHDATABASE, "SWITCHDATABASE"); - commandType.put(HiveParser.TOK_CREATETABLE, "CREATETABLE"); - commandType.put(HiveParser.TOK_DROPTABLE, "DROPTABLE"); - 
commandType.put(HiveParser.TOK_DESCTABLE, "DESCTABLE"); - commandType.put(HiveParser.TOK_DESCFUNCTION, "DESCFUNCTION"); - commandType.put(HiveParser.TOK_MSCK, "MSCK"); - commandType.put(HiveParser.TOK_ALTERTABLE_ADDCOLS, "ALTERTABLE_ADDCOLS"); - commandType.put(HiveParser.TOK_ALTERTABLE_REPLACECOLS, "ALTERTABLE_REPLACECOLS"); - commandType.put(HiveParser.TOK_ALTERTABLE_RENAMECOL, "ALTERTABLE_RENAMECOL"); - commandType.put(HiveParser.TOK_ALTERTABLE_RENAME, "ALTERTABLE_RENAME"); - commandType.put(HiveParser.TOK_ALTERTABLE_DROPPARTS, "ALTERTABLE_DROPPARTS"); - commandType.put(HiveParser.TOK_ALTERTABLE_ADDPARTS, "ALTERTABLE_ADDPARTS"); - commandType.put(HiveParser.TOK_ALTERTABLE_TOUCH, "ALTERTABLE_TOUCH"); - commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, "ALTERTABLE_ARCHIVE"); - commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, "ALTERTABLE_UNARCHIVE"); - commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, "ALTERTABLE_PROPERTIES"); - commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, "ALTERTABLE_SERIALIZER"); - commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, "ALTERTABLE_SERDEPROPERTIES"); - commandType.put(HiveParser.TOK_SHOWDATABASES, "SHOWDATABASES"); - commandType.put(HiveParser.TOK_SHOWTABLES, "SHOWTABLES"); - commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, "SHOW_TABLESTATUS"); - commandType.put(HiveParser.TOK_SHOWFUNCTIONS, "SHOWFUNCTIONS"); - commandType.put(HiveParser.TOK_SHOWPARTITIONS, "SHOWPARTITIONS"); - commandType.put(HiveParser.TOK_SHOWLOCKS, "SHOWLOCKS"); - commandType.put(HiveParser.TOK_CREATEFUNCTION, "CREATEFUNCTION"); - commandType.put(HiveParser.TOK_DROPFUNCTION, "DROPFUNCTION"); - commandType.put(HiveParser.TOK_CREATEVIEW, "CREATEVIEW"); - commandType.put(HiveParser.TOK_DROPVIEW, "DROPVIEW"); - commandType.put(HiveParser.TOK_CREATEINDEX, "CREATEINDEX"); - commandType.put(HiveParser.TOK_DROPINDEX, "DROPINDEX"); - commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, "ALTERINDEX_REBUILD"); - 
commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, "ALTERVIEW_PROPERTIES"); - commandType.put(HiveParser.TOK_QUERY, "QUERY"); - commandType.put(HiveParser.TOK_LOCKTABLE, "LOCKTABLE"); - commandType.put(HiveParser.TOK_UNLOCKTABLE, "UNLOCKTABLE"); + commandType.put(HiveParser.TOK_EXPLAIN, HiveOperation.EXPLAIN); + commandType.put(HiveParser.TOK_LOAD, HiveOperation.LOAD); + commandType.put(HiveParser.TOK_CREATEDATABASE, HiveOperation.CREATEDATABASE); + commandType.put(HiveParser.TOK_DROPDATABASE, HiveOperation.DROPDATABASE); + commandType.put(HiveParser.TOK_SWITCHDATABASE, HiveOperation.SWITCHDATABASE); + commandType.put(HiveParser.TOK_CREATETABLE, HiveOperation.CREATETABLE); + commandType.put(HiveParser.TOK_DROPTABLE, HiveOperation.DROPTABLE); + commandType.put(HiveParser.TOK_DESCTABLE, HiveOperation.DESCTABLE); + commandType.put(HiveParser.TOK_DESCFUNCTION, HiveOperation.DESCFUNCTION); + commandType.put(HiveParser.TOK_MSCK, HiveOperation.MSCK); + commandType.put(HiveParser.TOK_ALTERTABLE_ADDCOLS, HiveOperation.ALTERTABLE_ADDCOLS); + commandType.put(HiveParser.TOK_ALTERTABLE_REPLACECOLS, HiveOperation.ALTERTABLE_REPLACECOLS); + commandType.put(HiveParser.TOK_ALTERTABLE_RENAMECOL, HiveOperation.ALTERTABLE_RENAMECOL); + commandType.put(HiveParser.TOK_ALTERTABLE_RENAME, HiveOperation.ALTERTABLE_RENAME); + commandType.put(HiveParser.TOK_ALTERTABLE_DROPPARTS, HiveOperation.ALTERTABLE_DROPPARTS); + commandType.put(HiveParser.TOK_ALTERTABLE_ADDPARTS, HiveOperation.ALTERTABLE_ADDPARTS); + commandType.put(HiveParser.TOK_ALTERTABLE_TOUCH, HiveOperation.ALTERTABLE_TOUCH); + commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, HiveOperation.ALTERTABLE_ARCHIVE); + commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_UNARCHIVE); + commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES); + commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, HiveOperation.ALTERTABLE_SERIALIZER); + 
commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, HiveOperation.ALTERTABLE_SERDEPROPERTIES); + commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES); + commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES); + commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, HiveOperation.SHOW_TABLESTATUS); + commandType.put(HiveParser.TOK_SHOWFUNCTIONS, HiveOperation.SHOWFUNCTIONS); + commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS); + commandType.put(HiveParser.TOK_SHOWLOCKS, HiveOperation.SHOWLOCKS); + commandType.put(HiveParser.TOK_CREATEFUNCTION, HiveOperation.CREATEFUNCTION); + commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION); + commandType.put(HiveParser.TOK_CREATEVIEW, HiveOperation.CREATEVIEW); + commandType.put(HiveParser.TOK_DROPVIEW, HiveOperation.DROPVIEW); + commandType.put(HiveParser.TOK_CREATEINDEX, HiveOperation.CREATEINDEX); + commandType.put(HiveParser.TOK_DROPINDEX, HiveOperation.DROPINDEX); + commandType.put(HiveParser.TOK_ALTERINDEX_REBUILD, HiveOperation.ALTERINDEX_REBUILD); + commandType.put(HiveParser.TOK_ALTERVIEW_PROPERTIES, HiveOperation.ALTERVIEW_PROPERTIES); + commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY); + commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE); + commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE); + commandType.put(HiveParser.TOK_CREATEROLE, HiveOperation.CREATEROLE); + commandType.put(HiveParser.TOK_DROPROLE, HiveOperation.DROPROLE); + commandType.put(HiveParser.TOK_GRANT, HiveOperation.GRANT_PRIVILEGE); + commandType.put(HiveParser.TOK_REVOKE, HiveOperation.REVOKE_PRIVILEGE); + commandType.put(HiveParser.TOK_SHOW_GRANT, HiveOperation.SHOW_GRANT); + commandType.put(HiveParser.TOK_GRANT_ROLE, HiveOperation.GRANT_ROLE); + commandType.put(HiveParser.TOK_REVOKE_ROLE, HiveOperation.REVOKE_ROLE); + commandType.put(HiveParser.TOK_SHOW_ROLE_GRANT, HiveOperation.SHOW_ROLE_GRANT); } static { - 
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE, - new String[] { "ALTERTABLE_PROTECTMODE", "ALTERPARTITION_PROTECTMODE" }); + tablePartitionCommandType.put( + HiveParser.TOK_ALTERTABLE_ALTERPARTS_PROTECTMODE, + new HiveOperation[] { HiveOperation.ALTERTABLE_PROTECTMODE, + HiveOperation.ALTERPARTITION_PROTECTMODE }); tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_FILEFORMAT, - new String[] { "ALTERTABLE_FILEFORMAT", "ALTERPARTITION_FILEFORMAT" }); + new HiveOperation[] { HiveOperation.ALTERTABLE_FILEFORMAT, + HiveOperation.ALTERPARTITION_FILEFORMAT }); tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_LOCATION, - new String[] { "ALTERTABLE_LOCATION", "ALTERPARTITION_LOCATION" }); + new HiveOperation[] { HiveOperation.ALTERTABLE_LOCATION, + HiveOperation.ALTERPARTITION_LOCATION }); } - public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree) throws SemanticException { @@ -129,9 +141,17 @@ case HiveParser.TOK_ALTERTABLE_UNARCHIVE: case HiveParser.TOK_LOCKTABLE: case HiveParser.TOK_UNLOCKTABLE: + case HiveParser.TOK_CREATEROLE: + case HiveParser.TOK_DROPROLE: + case HiveParser.TOK_GRANT: + case HiveParser.TOK_REVOKE: + case HiveParser.TOK_SHOW_GRANT: + case HiveParser.TOK_GRANT_ROLE: + case HiveParser.TOK_REVOKE_ROLE: + case HiveParser.TOK_SHOW_ROLE_GRANT: return new DDLSemanticAnalyzer(conf); case HiveParser.TOK_ALTERTABLE_PARTITION: - String commandType = null; + HiveOperation commandType = null; Integer type = ((ASTNode) tree.getChild(1)).getToken().getType(); if (tree.getChild(0).getChildCount() > 1) { commandType = tablePartitionCommandType.get(type)[1]; @@ -149,7 +169,7 @@ } } - private static void setSessionCommandType(String commandType) { + private static void setSessionCommandType(HiveOperation commandType) { if (SessionState.get() != null) { SessionState.get().setCommandType(commandType); } Index: ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java 
=================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (working copy) @@ -53,6 +53,12 @@ private AlterTableSimpleDesc alterTblSimpleDesc; private MsckDesc msckDesc; private ShowTableStatusDesc showTblStatusDesc; + + private RoleDDLDesc roleDDLDesc; + private GrantDesc grantDesc; + private ShowGrantDesc showGrantDesc; + private RevokeDesc revokeDesc; + private GrantRevokeRoleDDL grantRevokeRoleDDL; /** * ReadEntitites that are passed to the hooks. @@ -295,6 +301,36 @@ this.dropIdxDesc = dropIndexDesc; } + public DDLWork(HashSet inputs, HashSet outputs, + RoleDDLDesc roleDDLDesc) { + this(inputs, outputs); + this.roleDDLDesc = roleDDLDesc; + } + + public DDLWork(HashSet inputs, HashSet outputs, + GrantDesc grantDesc) { + this(inputs, outputs); + this.grantDesc = grantDesc; + } + + public DDLWork(HashSet inputs, HashSet outputs, + ShowGrantDesc showGrant) { + this(inputs, outputs); + this.showGrantDesc = showGrant; + } + + public DDLWork(HashSet inputs, HashSet outputs, + RevokeDesc revokeDesc) { + this(inputs, outputs); + this.revokeDesc = revokeDesc; + } + + public DDLWork(HashSet inputs, HashSet outputs, + GrantRevokeRoleDDL grantRevokeRoleDDL) { + this(inputs, outputs); + this.grantRevokeRoleDDL = grantRevokeRoleDDL; + } + /** * @return Create Database descriptor */ @@ -668,4 +704,68 @@ this.dropIdxDesc = dropIdxDesc; } + /** + * @return role ddl desc + */ + public RoleDDLDesc getRoleDDLDesc() { + return roleDDLDesc; + } + + /** + * @param roleDDLDesc role ddl desc + */ + public void setRoleDDLDesc(RoleDDLDesc roleDDLDesc) { + this.roleDDLDesc = roleDDLDesc; + } + + /** + * @return grant desc + */ + public GrantDesc getGrantDesc() { + return grantDesc; + } + + /** + * @param grantDesc grant desc + */ + public void setGrantDesc(GrantDesc grantDesc) { + this.grantDesc = grantDesc; + } + + /** + * @return show grant 
desc + */ + public ShowGrantDesc getShowGrantDesc() { + return showGrantDesc; + } + + /** + * @param showGrantDesc + */ + public void setShowGrantDesc(ShowGrantDesc showGrantDesc) { + this.showGrantDesc = showGrantDesc; + } + + public RevokeDesc getRevokeDesc() { + return revokeDesc; + } + + public void setRevokeDesc(RevokeDesc revokeDesc) { + this.revokeDesc = revokeDesc; + } + + /** + * @return + */ + public GrantRevokeRoleDDL getGrantRevokeRoleDDL() { + return grantRevokeRoleDDL; + } + + /** + * @param grantRevokeRoleDDL + */ + public void setGrantRevokeRoleDDL(GrantRevokeRoleDDL grantRevokeRoleDDL) { + this.grantRevokeRoleDDL = grantRevokeRoleDDL; + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/GrantDesc.java (revision 0) @@ -0,0 +1,107 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;
import java.util.List;

/**
 * Plan descriptor for a GRANT statement: the set of privileges being granted,
 * the principals (users/groups/roles) receiving them, the object they apply
 * to, and whether WITH GRANT OPTION was specified.
 */
@Explain(displayName = "Grant")
public class GrantDesc extends DDLDesc implements Serializable, Cloneable {

  private static final long serialVersionUID = 1L;

  // NOTE(review): element types reconstructed from the constructor parameter
  // names (privilegeDesc/principalDesc); the original raw types appear to be
  // extraction-garbled generics — confirm against the committed revision.
  private List<PrivilegeDesc> privileges;

  private List<PrincipalDesc> principals;

  private boolean grantOption;

  private PrivilegeObjectDesc privilegeSubjectDesc;

  /**
   * @param privilegeSubject the object (table/database/partition) granted on;
   *                         null means a user-level (global) grant
   * @param privilegeDesc    privileges to grant
   * @param principalDesc    principals receiving the grant
   * @param grantOption      true if WITH GRANT OPTION was specified
   */
  public GrantDesc(PrivilegeObjectDesc privilegeSubject,
      List<PrivilegeDesc> privilegeDesc, List<PrincipalDesc> principalDesc,
      boolean grantOption) {
    super();
    this.privilegeSubjectDesc = privilegeSubject;
    this.privileges = privilegeDesc;
    this.principals = principalDesc;
    this.grantOption = grantOption;
  }

  /**
   * @return privileges being granted
   */
  @Explain(displayName = "Privileges")
  public List<PrivilegeDesc> getPrivileges() {
    return privileges;
  }

  /**
   * @param privileges privileges being granted
   */
  public void setPrivileges(List<PrivilegeDesc> privileges) {
    this.privileges = privileges;
  }

  /**
   * @return principals receiving the grant
   */
  @Explain(displayName = "Principals")
  public List<PrincipalDesc> getPrincipals() {
    return principals;
  }

  /**
   * @param principals principals receiving the grant
   */
  public void setPrincipals(List<PrincipalDesc> principals) {
    this.principals = principals;
  }

  /**
   * @return true if WITH GRANT OPTION was specified
   */
  @Explain(displayName = "grant option")
  public boolean isGrantOption() {
    return grantOption;
  }

  /**
   * @param grantOption whether WITH GRANT OPTION was specified
   */
  public void setGrantOption(boolean grantOption) {
    this.grantOption = grantOption;
  }

  /**
   * @return the object the privileges apply to
   */
  @Explain(displayName = "privilege subject")
  public PrivilegeObjectDesc getPrivilegeSubjectDesc() {
    return privilegeSubjectDesc;
  }

  /**
   * @param privilegeSubjectDesc the object the privileges apply to
   */
  public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) {
    this.privilegeSubjectDesc = privilegeSubjectDesc;
  }

}
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;
import java.util.List;

/**
 * Plan descriptor for GRANT ROLE / REVOKE ROLE statements: which roles are
 * granted to (or revoked from) which principals.
 *
 * Implements Serializable because it is carried inside DDLWork, which is a
 * serializable plan object.
 */
@Explain(displayName = "grant or revoke roles")
public class GrantRevokeRoleDDL implements Serializable {

  private static final long serialVersionUID = 1L;

  // true = GRANT ROLE, false = REVOKE ROLE
  private boolean grant;

  // NOTE(review): element types reconstructed from usage; originals were
  // garbled to raw types in extraction — confirm against committed revision.
  private List<PrincipalDesc> principalDesc;

  private List<String> roles;

  public GrantRevokeRoleDDL() {
  }

  public GrantRevokeRoleDDL(boolean grant, List<String> roles,
      List<PrincipalDesc> principalDesc) {
    super();
    this.grant = grant;
    this.principalDesc = principalDesc;
    this.roles = roles;
  }

  /**
   * @return true for GRANT ROLE, false for REVOKE ROLE
   */
  @Explain(displayName = "grant (or revoke)")
  public boolean getGrant() {
    return grant;
  }

  public void setGrant(boolean grant) {
    this.grant = grant;
  }

  /**
   * @return principals the roles are granted to / revoked from
   */
  @Explain(displayName = "principals")
  public List<PrincipalDesc> getPrincipalDesc() {
    return principalDesc;
  }

  public void setPrincipalDesc(List<PrincipalDesc> principalDesc) {
    this.principalDesc = principalDesc;
  }

  /**
   * @return names of the roles being granted or revoked
   */
  @Explain(displayName = "roles")
  public List<String> getRoles() {
    return roles;
  }

  public void setRoles(List<String> roles) {
    this.roles = roles;
  }

}
+ */ + +package org.apache.hadoop.hive.ql.plan; + +import org.apache.hadoop.hive.ql.security.authorization.Privilege; + +public enum HiveOperation { + + EXPLAIN("EXPLAIN", null, null), + LOAD("LOAD", null, new Privilege[]{Privilege.ALTER_DATA}), + CREATEDATABASE("CREATEDATABASE", null, null), + DROPDATABASE("DROPDATABASE", null, null), + SWITCHDATABASE("SWITCHDATABASE", null, null), + DROPTABLE ("DROPTABLE", null, new Privilege[]{Privilege.DROP}), + DESCTABLE("DESCTABLE", null, null), + DESCFUNCTION("DESCFUNCTION", null, null), + MSCK("MSCK", null, null), + ALTERTABLE_ADDCOLS("ALTERTABLE_ADDCOLS", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_REPLACECOLS("ALTERTABLE_REPLACECOLS", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_RENAMECOL("ALTERTABLE_RENAMECOL", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_RENAME("ALTERTABLE_RENAME", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_DROPPARTS("ALTERTABLE_DROPPARTS", new Privilege[]{Privilege.CREATE}, null), + ALTERTABLE_ADDPARTS("ALTERTABLE_ADDPARTS", new Privilege[]{Privilege.CREATE}, null), + ALTERTABLE_TOUCH("ALTERTABLE_TOUCH", null, null), + ALTERTABLE_ARCHIVE("ALTERTABLE_ARCHIVE", new Privilege[]{Privilege.ALTER_DATA}, null), + ALTERTABLE_UNARCHIVE("ALTERTABLE_UNARCHIVE", new Privilege[]{Privilege.ALTER_DATA}, null), + ALTERTABLE_PROPERTIES("ALTERTABLE_PROPERTIES", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_SERIALIZER("ALTERTABLE_SERIALIZER", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_SERDEPROPERTIES("ALTERTABLE_SERDEPROPERTIES", new Privilege[]{Privilege.ALTER_METADATA}, null), + SHOWDATABASES("SHOWDATABASES", new Privilege[]{Privilege.SHOW_DATABASE}, null), + SHOWTABLES("SHOWTABLES", null, null), + SHOW_TABLESTATUS("SHOW_TABLESTATUS", null, null), + SHOWFUNCTIONS("SHOWFUNCTIONS", null, null), + SHOWPARTITIONS("SHOWPARTITIONS", null, null), + SHOWLOCKS("SHOWLOCKS", null, null), + 
CREATEFUNCTION("CREATEFUNCTION", null, null), + DROPFUNCTION("DROPFUNCTION", null, null), + CREATEVIEW("CREATEVIEW", null, null), + DROPVIEW("DROPVIEW", null, null), + CREATEINDEX("CREATEINDEX", null, null), + DROPINDEX("DROPINDEX", null, null), + ALTERINDEX_REBUILD("ALTERINDEX_REBUILD", null, null), + ALTERVIEW_PROPERTIES("ALTERVIEW_PROPERTIES", null, null), + LOCKTABLE("LOCKTABLE", new Privilege[]{Privilege.LOCK}, null), + UNLOCKTABLE("UNLOCKTABLE", new Privilege[]{Privilege.LOCK}, null), + CREATEROLE("CREATEROLE", null, null), + DROPROLE("DROPROLE", null, null), + GRANT_PRIVILEGE("GRANT_PRIVILEGE", null, null), + REVOKE_PRIVILEGE("REVOKE_PRIVILEGE", null, null), + SHOW_GRANT("SHOW_GRANT", null, null), + GRANT_ROLE("GRANT_ROLE", null, null), + REVOKE_ROLE("REVOKE_ROLE", null, null), + SHOW_ROLE_GRANT("SHOW_ROLE_GRANT", null, null), + ALTERTABLE_PROTECTMODE("ALTERTABLE_PROTECTMODE", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERPARTITION_PROTECTMODE("ALTERPARTITION_PROTECTMODE", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_FILEFORMAT("ALTERTABLE_FILEFORMAT", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERPARTITION_FILEFORMAT("ALTERPARTITION_FILEFORMAT", new Privilege[]{Privilege.ALTER_METADATA}, null), + ALTERTABLE_LOCATION("ALTERTABLE_LOCATION", new Privilege[]{Privilege.ALTER_DATA}, null), + ALTERPARTITION_LOCATION("ALTERPARTITION_LOCATION", new Privilege[]{Privilege.ALTER_DATA}, null), + CREATETABLE("CREATETABLE", null, new Privilege[]{Privilege.CREATE}), + CREATETABLE_AS_SELECT("CREATETABLE_AS_SELECT", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.CREATE}), + QUERY("QUERY", new Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.ALTER_DATA}), + ; + + private String operationName; + + private Privilege[] inputRequiredPrivileges; + + private Privilege[] outputRequiredPrivileges; + + public Privilege[] getInputRequiredPrivileges() { + return inputRequiredPrivileges; + } + + public Privilege[] 
getOutputRequiredPrivileges() { + return outputRequiredPrivileges; + } + + public String getOperationName() { + return operationName; + } + + private HiveOperation(String operationName, + Privilege[] inputRequiredPrivileges, Privilege[] outputRequiredPrivileges) { + this.operationName = operationName; + this.inputRequiredPrivileges = inputRequiredPrivileges; + this.outputRequiredPrivileges = outputRequiredPrivileges; + } + + public static class PrivilegeAgreement { + + private Privilege[] inputUserLevelRequiredPriv; + private Privilege[] inputDBLevelRequiredPriv; + private Privilege[] inputTableLevelRequiredPriv; + private Privilege[] inputColumnLevelRequiredPriv; + private Privilege[] outputUserLevelRequiredPriv; + private Privilege[] outputDBLevelRequiredPriv; + private Privilege[] outputTableLevelRequiredPriv; + private Privilege[] outputColumnLevelRequiredPriv; + + public PrivilegeAgreement putUserLevelRequiredPriv( + Privilege[] inputUserLevelRequiredPriv, + Privilege[] outputUserLevelRequiredPriv) { + this.inputUserLevelRequiredPriv = inputUserLevelRequiredPriv; + this.outputUserLevelRequiredPriv = outputUserLevelRequiredPriv; + return this; + } + + public PrivilegeAgreement putDBLevelRequiredPriv( + Privilege[] inputDBLevelRequiredPriv, + Privilege[] outputDBLevelRequiredPriv) { + this.inputDBLevelRequiredPriv = inputDBLevelRequiredPriv; + this.outputDBLevelRequiredPriv = outputDBLevelRequiredPriv; + return this; + } + + public PrivilegeAgreement putTableLevelRequiredPriv( + Privilege[] inputTableLevelRequiredPriv, + Privilege[] outputTableLevelRequiredPriv) { + this.inputTableLevelRequiredPriv = inputTableLevelRequiredPriv; + this.outputTableLevelRequiredPriv = outputTableLevelRequiredPriv; + return this; + } + + public PrivilegeAgreement putColumnLevelRequiredPriv( + Privilege[] inputColumnLevelPriv, Privilege[] outputColumnLevelPriv) { + this.inputColumnLevelRequiredPriv = inputColumnLevelPriv; + this.outputColumnLevelRequiredPriv = outputColumnLevelPriv; 
+ return this; + } + + public Privilege[] getInputUserLevelRequiredPriv() { + return inputUserLevelRequiredPriv; + } + + public Privilege[] getInputDBLevelRequiredPriv() { + return inputDBLevelRequiredPriv; + } + + public Privilege[] getInputTableLevelRequiredPriv() { + return inputTableLevelRequiredPriv; + } + + public Privilege[] getInputColumnLevelRequiredPriv() { + return inputColumnLevelRequiredPriv; + } + + public Privilege[] getOutputUserLevelRequiredPriv() { + return outputUserLevelRequiredPriv; + } + + public Privilege[] getOutputDBLevelRequiredPriv() { + return outputDBLevelRequiredPriv; + } + + public Privilege[] getOutputTableLevelRequiredPriv() { + return outputTableLevelRequiredPriv; + } + + public Privilege[] getOutputColumnLevelRequiredPriv() { + return outputColumnLevelRequiredPriv; + } + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/plan/HiveQueryReadWrite.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/HiveQueryReadWrite.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/HiveQueryReadWrite.java (revision 0) @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package org.apache.hadoop.hive.ql.plan;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

/**
 * Groups the read entities (inputs) and write entities (outputs) of a query
 * by the HiveOperation that touches them, so privilege checks can look up
 * per-operation entity lists.
 */
public class HiveQueryReadWrite {

  // NOTE(review): generic type arguments reconstructed from the
  // put*ForOperation signatures; originals were garbled to raw types.
  Map<HiveOperation, List<ReadEntity>> opToInputList =
      new HashMap<HiveOperation, List<ReadEntity>>();
  Map<HiveOperation, List<WriteEntity>> opToOutputList =
      new HashMap<HiveOperation, List<WriteEntity>>();

  /**
   * Records a read entity for the given operation, creating the list
   * lazily on first use.
   */
  public void putInputForOperation(ReadEntity input, HiveOperation op) {
    List<ReadEntity> inputs = opToInputList.get(op);
    if (inputs == null) {
      inputs = new ArrayList<ReadEntity>();
      opToInputList.put(op, inputs);
    }
    inputs.add(input);
  }

  /**
   * Records a write entity for the given operation, creating the list
   * lazily on first use.
   */
  public void putOutputForOperation(WriteEntity output, HiveOperation op) {
    List<WriteEntity> outputs = opToOutputList.get(op);
    if (outputs == null) {
      outputs = new ArrayList<WriteEntity>();
      opToOutputList.put(op, outputs);
    }
    outputs.add(output);
  }

  public Map<HiveOperation, List<ReadEntity>> getOpToInputList() {
    return opToInputList;
  }

  public Map<HiveOperation, List<WriteEntity>> getOpToOutputList() {
    return opToOutputList;
  }
}
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;

/**
 * Identifies a principal in a grant/revoke statement: a name plus whether
 * that name denotes a user, a group, or a role.
 */
@Explain(displayName = "Principal")
public class PrincipalDesc implements Serializable, Cloneable {

  private static final long serialVersionUID = 1L;

  /** Kind of principal a name refers to. */
  public static enum PrincipalType {
    USER, GROUP, ROLE;
  }

  private String name;

  private PrincipalType type;

  public PrincipalDesc(String name, PrincipalType type) {
    super();
    this.name = name;
    this.type = type;
  }

  public PrincipalDesc() {
    super();
  }

  /** @return the principal's name */
  @Explain(displayName = "name")
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /** @return whether the name is a user, group, or role */
  @Explain(displayName = "type")
  public PrincipalType getType() {
    return type;
  }

  public void setType(PrincipalType type) {
    this.type = type;
  }

}
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.security.authorization.Privilege;

/**
 * A privilege together with the (optional) list of columns it applies to,
 * as it appears in a GRANT/REVOKE statement.
 */
@Explain(displayName = "Privilege")
public class PrivilegeDesc implements Serializable, Cloneable {
  private static final long serialVersionUID = 1L;

  private Privilege privilege;

  // NOTE(review): element type reconstructed as String (column names);
  // original was garbled to a raw List — confirm against committed revision.
  private List<String> columns;

  public PrivilegeDesc(Privilege privilege, List<String> columns) {
    super();
    this.privilege = privilege;
    this.columns = columns;
  }

  public PrivilegeDesc() {
    super();
  }

  /**
   * @return privilege definition
   */
  @Explain(displayName = "privilege")
  public Privilege getPrivilege() {
    return privilege;
  }

  /**
   * @param privilege privilege definition
   */
  public void setPrivilege(Privilege privilege) {
    this.privilege = privilege;
  }

  /**
   * @return columns on which the given privilege takes effect; null when the
   *         privilege applies to the whole object
   */
  @Explain(displayName = "columns")
  public List<String> getColumns() {
    return columns;
  }

  /**
   * @param columns columns the privilege applies to
   */
  public void setColumns(List<String> columns) {
    this.columns = columns;
  }

}
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;
import java.util.HashMap;

/**
 * The object a privilege statement applies to: a table (optionally restricted
 * to a partition via partSpec) or a database.
 *
 * Implements Serializable because it is embedded in serializable plan
 * descriptors (GrantDesc, RevokeDesc, ShowGrantDesc).
 */
@Explain(displayName = "privilege subject")
public class PrivilegeObjectDesc implements Serializable {

  private static final long serialVersionUID = 1L;

  // true when the object is a table; false when it is a database
  private boolean table;

  private String object;

  // NOTE(review): key/value types reconstructed as String (partition column
  // name -> value); original was garbled to a raw HashMap — confirm.
  private HashMap<String, String> partSpec;

  public PrivilegeObjectDesc(boolean isTable, String object,
      HashMap<String, String> partSpec) {
    super();
    this.table = isTable;
    this.object = object;
    this.partSpec = partSpec;
  }

  public PrivilegeObjectDesc() {
  }

  @Explain(displayName = "is table")
  public boolean getTable() {
    return table;
  }

  public void setTable(boolean isTable) {
    this.table = isTable;
  }

  @Explain(displayName = "object")
  public String getObject() {
    return object;
  }

  public void setObject(String object) {
    this.object = object;
  }

  @Explain(displayName = "partition spec")
  public HashMap<String, String> getPartSpec() {
    return partSpec;
  }

  public void setPartSpec(HashMap<String, String> partSpec) {
    this.partSpec = partSpec;
  }

}
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;
import java.util.List;

/**
 * Plan descriptor for a REVOKE statement: the privileges being revoked, the
 * principals they are revoked from, and the object they were granted on.
 */
@Explain(displayName = "Revoke")
public class RevokeDesc extends DDLDesc implements Serializable, Cloneable {

  private static final long serialVersionUID = 1L;

  // NOTE(review): element types reconstructed to mirror GrantDesc;
  // originals were garbled to raw types in extraction — confirm.
  private List<PrivilegeDesc> privileges;

  private List<PrincipalDesc> principals;

  private PrivilegeObjectDesc privilegeSubjectDesc;

  public RevokeDesc() {
  }

  public RevokeDesc(List<PrivilegeDesc> privileges,
      List<PrincipalDesc> principals, PrivilegeObjectDesc privilegeSubjectDesc) {
    super();
    this.privileges = privileges;
    this.principals = principals;
    this.privilegeSubjectDesc = privilegeSubjectDesc;
  }

  /** @return privileges being revoked */
  public List<PrivilegeDesc> getPrivileges() {
    return privileges;
  }

  public void setPrivileges(List<PrivilegeDesc> privileges) {
    this.privileges = privileges;
  }

  /** @return principals the privileges are revoked from */
  public List<PrincipalDesc> getPrincipals() {
    return principals;
  }

  public void setPrincipals(List<PrincipalDesc> principals) {
    this.principals = principals;
  }

  /** @return the object the privileges were granted on */
  public PrivilegeObjectDesc getPrivilegeSubjectDesc() {
    return privilegeSubjectDesc;
  }

  public void setPrivilegeSubjectDesc(PrivilegeObjectDesc privilegeSubjectDesc) {
    this.privilegeSubjectDesc = privilegeSubjectDesc;
  }

}
package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;

/**
 * Plan descriptor for role DDL statements: CREATE ROLE, DROP ROLE, and
 * SHOW ROLE GRANT. The principal may be a plain user, a role, or a group.
 */
@Explain(displayName = "Create Role")
public class RoleDDLDesc extends DDLDesc implements Serializable {

  private static final long serialVersionUID = 1L;

  // Principal (or role) name the operation applies to.
  private String name;

  // true when the principal named above is a role
  private boolean role;

  // true when the principal named above is a group
  private boolean group;

  private RoleOperation operation;

  // Local file SHOW ROLE GRANT writes its results to.
  private String resFile;

  /** The specific role statement being executed. */
  public static enum RoleOperation {
    DROP_ROLE("drop_role"), CREATE_ROLE("create_role"), SHOW_ROLE_GRANT("show_roles");

    private String operationName;

    private RoleOperation(String operationName) {
      this.operationName = operationName;
    }

    public String getOperationName() {
      return operationName;
    }

    public String toString() {
      return this.operationName;
    }
  }

  public RoleDDLDesc() {
  }

  public RoleDDLDesc(String roleName, RoleOperation operation) {
    this(roleName, false, false, operation);
  }

  public RoleDDLDesc(String principalName, boolean isRole, boolean isGroup,
      RoleOperation operation) {
    this.name = principalName;
    this.role = isRole;
    this.group = isGroup;
    this.operation = operation;
  }

  @Explain(displayName = "name")
  public String getName() {
    return name;
  }

  public void setName(String roleName) {
    this.name = roleName;
  }

  @Explain(displayName = "role operation")
  public RoleOperation getOperation() {
    return operation;
  }

  public void setOperation(RoleOperation operation) {
    this.operation = operation;
  }

  public boolean getRole() {
    return role;
  }

  public void setRole(boolean role) {
    this.role = role;
  }

  public boolean getGroup() {
    return group;
  }

  public void setGroup(boolean group) {
    this.group = group;
  }

  public String getResFile() {
    return resFile;
  }

  public void setResFile(String resFile) {
    this.resFile = resFile;
  }

}
+ */ +package org.apache.hadoop.hive.ql.plan; + +import java.util.List; + +@Explain(displayName="show grant desc") +public class ShowGrantDesc { + + private PrincipalDesc principalDesc; + + private PrivilegeObjectDesc hiveObj; + + private List columns; + + private String resFile; + + public ShowGrantDesc(){ + } + + public ShowGrantDesc(String resFile, PrincipalDesc principalDesc, + PrivilegeObjectDesc subjectObj, List columns) { + this.resFile = resFile; + this.principalDesc = principalDesc; + this.hiveObj = subjectObj; + this.columns = columns; + } + + @Explain(displayName="principal desc") + public PrincipalDesc getPrincipalDesc() { + return principalDesc; + } + + public void setPrincipalDesc(PrincipalDesc principalDesc) { + this.principalDesc = principalDesc; + } + + @Explain(displayName="object") + public PrivilegeObjectDesc getHiveObj() { + return hiveObj; + } + + public void setHiveObj(PrivilegeObjectDesc subjectObj) { + this.hiveObj = subjectObj; + } + + public String getResFile() { + return resFile; + } + + public void setResFile(String resFile) { + this.resFile = resFile; + } + + public List getColumns() { + return columns; + } + + public void setColumns(List columns) { + this.columns = columns; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/Authenticator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/Authenticator.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/Authenticator.java (revision 0) @@ -0,0 +1,35 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; + +public interface Authenticator { + + public String getUserName(); + + public List getGroupNames(); + + public boolean detroy(); + + public void init(Configuration conf); + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/AuthenticatorFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/AuthenticatorFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/AuthenticatorFactory.java (revision 0) @@ -0,0 +1,52 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +public class AuthenticatorFactory { + + @SuppressWarnings("unchecked") + public static Authenticator getAuthenticator(Configuration conf) throws HiveException { + + String clsStr = HiveConf.getVar(conf, + HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER); + + Authenticator ret = null; + try { + Class<? extends Authenticator> cls = null; + if (clsStr == null || clsStr.trim().equals("")) { + cls = HadoopDefaultAuthenticator.class; + } else { + cls = (Class<? extends Authenticator>) Class + .forName(clsStr); + } + if (cls != null) { + ret = cls.newInstance(); + ret.init(conf); + } + } catch (Exception e) { + throw new HiveException(e); + } + + return ret; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java (revision 0) @@ -0,0 +1,86 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security; + +import java.lang.reflect.Method; +import java.util.Arrays; +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; + +public class HadoopDefaultAuthenticator implements Authenticator { + + private String userName; + private List groupNames; + + @Override + public List getGroupNames() { + return groupNames; + } + + @Override + public String getUserName() { + return userName; + } + + @Override + public void init(Configuration conf) { + Class ugiCls = UserGroupInformation.class; + UserGroupInformation ugi = null; + try { + Method loginMethod = ugiCls.getDeclaredMethod("login", + new Class[] { Configuration.class }); + if (loginMethod != null) { + ugi = (UserGroupInformation) loginMethod.invoke(null, conf); + } + } catch (Exception e) { + } + + if (ugi == null) { + try { + Method loginMethod = ugiCls.getDeclaredMethod("getLoginUser", + (Class[]) null); + if (loginMethod != null) { + ugi = (UserGroupInformation) loginMethod.invoke(null, (Object) null); + } + } catch (Exception e) { + } + } + + if (ugi == null) { + throw new RuntimeException( + "Can not initialize HadoopDefaultAuthenticator."); + } + + this.userName = ugi.getUserName(); + if (ugi.getGroupNames() != null) { + this.groupNames = Arrays.asList(ugi.getGroupNames()); + } + + System.out.println("User Name is " + this.userName); + System.out.println("Group Names are " + this.groupNames); + } + + @Override + public boolean detroy() { + return true; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationManagerFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationManagerFactory.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationManagerFactory.java (revision 0) @@ -0,0 +1,56 @@ +/** + * Licensed to the 
Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.security.Authenticator; + +public class AuthorizationManagerFactory { + + @SuppressWarnings("unchecked") + public static AuthorizationProviderManager getAuthorizeProviderManager( + Configuration conf, Authenticator authenticator) throws HiveException { + + String clsStr = HiveConf.getVar(conf, + HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + + AuthorizationProviderManager ret = null; + try { + Class<? extends AuthorizationProviderManager> cls = null; + if (clsStr == null || clsStr.trim().equals("")) { + cls = DefaultAuthorizationProviderManager.class; + } else { + cls = (Class<? extends AuthorizationProviderManager>) Class + .forName(clsStr); + } + if (cls != null) { + ret = cls.newInstance(); + ret.init(conf); + } + } catch (Exception e) { + throw new HiveException(e); + } + + ret.setAuthenticator(authenticator); + return ret; + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationProvider.java =================================================================== --- 
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationProvider.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationProvider.java (revision 0) @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization; + + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +public interface AuthorizationProvider { + + public void init(Configuration conf) throws HiveException; + + public boolean access(Table table, PrincipalPrivilegeSet privs); + + public boolean access(Partition part, PrincipalPrivilegeSet privs); + + public boolean access(Table table, FieldSchema column, PrincipalPrivilegeSet privs); + + public boolean access(PrincipalPrivilegeSet privs); + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationProviderManager.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationProviderManager.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationProviderManager.java (revision 0) @@ -0,0 +1,67 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.security.Authenticator; + +public abstract class AuthorizationProviderManager { + + protected Authenticator authenticator; + + protected Hive hive_db; + + public void init(Configuration conf) throws HiveException { + hive_db = Hive.get(new HiveConf(conf, AuthorizationProviderManager.class)); + } + + protected Authenticator getAuthenticator() { + return authenticator; + } + + protected void setAuthenticator(Authenticator authenticator) { + this.authenticator = authenticator; + } + + public abstract boolean authorize(Privilege[] readRequiredPriv, + Privilege[] writeRequiredPriv) throws HiveException; + + public abstract boolean authorize(Database db, Privilege[] readRequiredPriv, + Privilege[] writeRequiredPriv) throws HiveException; + + public abstract boolean authorize(Table table, Privilege[] readRequiredPriv, + Privilege[] writeRequiredPriv) throws HiveException; + + public abstract boolean authorize(Partition part, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException; + + public abstract boolean authorize(Table table, Partition part, + List columns, Privilege[] readRequiredPriv, + Privilege[] writeRequiredPriv) throws HiveException; + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultAuthorizationProviderManager.java =================================================================== --- 
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultAuthorizationProviderManager.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultAuthorizationProviderManager.java (revision 0) @@ -0,0 +1,354 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.util.StringUtils; + +public class DefaultAuthorizationProviderManager extends + AuthorizationProviderManager { + + static class BitSetChecker { + + boolean[] inputCheck = null; + boolean[] outputCheck = null; + + public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv, + Privilege[] outputRequiredPriv) { + BitSetChecker checker = new BitSetChecker(); + if (inputRequiredPriv != null) { + checker.inputCheck = new boolean[inputRequiredPriv.length]; + for (int i = 0; i < checker.inputCheck.length; i++) { + checker.inputCheck[i] = false; + } + } + if (outputRequiredPriv != null) { + checker.outputCheck = new boolean[outputRequiredPriv.length]; + for (int i = 0; i < checker.outputCheck.length; i++) { + checker.outputCheck[i] = false; + } + } + + return checker; + } + + } + + @Override + public boolean authorize(Privilege[] inputRequiredPriv, + Privilege[] outputRequiredPriv) throws HiveException { + + BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv, + outputRequiredPriv); + boolean[] inputCheck = checker.inputCheck; + boolean[] outputCheck = checker.outputCheck; + + return authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv, + outputCheck); + } + + protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv, + boolean[] inputCheck, Privilege[] outputRequiredPriv, + boolean[] outputCheck) throws HiveException { + PrincipalPrivilegeSet privileges = hive_db.get_user_privilege_set(this + .getAuthenticator().getUserName(), 
this.getAuthenticator() + .getGroupNames()); + return authorizePrivileges(privileges, inputRequiredPriv, inputCheck, + outputRequiredPriv, outputCheck); + } + + @Override + public boolean authorize(Database db, Privilege[] inputRequiredPriv, + Privilege[] outputRequiredPriv) throws HiveException { + + BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv, + outputRequiredPriv); + boolean[] inputCheck = checker.inputCheck; + boolean[] outputCheck = checker.outputCheck; + + return authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv, + inputCheck, outputCheck); + } + + private boolean authorizeUserAndDBPriv(Database db, + Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv, + boolean[] inputCheck, boolean[] outputCheck) throws HiveException { + if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv, + outputCheck)) { + return true; + } + + PrincipalPrivilegeSet dbPrivileges = hive_db.get_db_privilege_set(db + .getName(), this.getAuthenticator().getUserName(), this + .getAuthenticator().getGroupNames()); + + if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck, + outputRequiredPriv, outputCheck)) { + return true; + } + + return false; + } + + @Override + public boolean authorize(Table table, Privilege[] inputRequiredPriv, + Privilege[] outputRequiredPriv) throws HiveException { + BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv, + outputRequiredPriv); + boolean[] inputCheck = checker.inputCheck; + boolean[] outputCheck = checker.outputCheck; + + return authorizeUserDBAndTable(table, inputRequiredPriv, + outputRequiredPriv, inputCheck, outputCheck); + } + + private boolean authorizeUserDBAndTable(Table table, + Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv, + boolean[] inputCheck, boolean[] outputCheck) throws HiveException { + if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()), + inputRequiredPriv, outputRequiredPriv, inputCheck, 
outputCheck)) { + return true; + } + + PrincipalPrivilegeSet tablePrivileges = hive_db.get_table_privilege_set( + table.getDbName(), table.getTableName(), this.getAuthenticator() + .getUserName(), this.getAuthenticator().getGroupNames()); + + if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck, + outputRequiredPriv, outputCheck)) { + return true; + } + + return false; + } + + @Override + public boolean authorize(Partition part, Privilege[] inputRequiredPriv, + Privilege[] outputRequiredPriv) throws HiveException { + BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv, + outputRequiredPriv); + boolean[] inputCheck = checker.inputCheck; + boolean[] outputCheck = checker.outputCheck; + + if (authorizeUserAndDBPriv( + hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv, + outputRequiredPriv, inputCheck, outputCheck)) { + return true; + } + + PrincipalPrivilegeSet tablePrivileges = hive_db + .get_partition_privilege_set(part.getTable().getDbName(), part + .getTable().getTableName(), part.getName(), this.getAuthenticator() + .getUserName(), this.getAuthenticator().getGroupNames()); + + if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck, + outputRequiredPriv, outputCheck)) { + return true; + } + + return false; + } + + @Override + public boolean authorize(Table table, Partition part, List columns, + Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv) + throws HiveException { + BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv, + outputRequiredPriv); + boolean[] inputCheck = checker.inputCheck; + boolean[] outputCheck = checker.outputCheck; + + if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv, + inputCheck, outputCheck)) { + return true; + } + + String partName = null; + if (part != null) { + partName = part.getName(); + } + + for (String col : columns) { + + BitSetChecker checker2 = BitSetChecker.getBitSetChecker( + inputRequiredPriv, 
outputRequiredPriv); + boolean[] inputCheck2 = checker2.inputCheck; + boolean[] outputCheck2 = checker2.outputCheck; + + PrincipalPrivilegeSet columnPrivileges = hive_db + .get_column_privilege_set(table.getDbName(), table.getTableName(), + partName, col, this.getAuthenticator().getUserName(), this + .getAuthenticator().getGroupNames()); + + authorizePrivileges(columnPrivileges, inputRequiredPriv, inputCheck2, + outputRequiredPriv, outputCheck2); + if (inputCheck2 != null) { + booleanArrayOr(inputCheck2, inputCheck); + } + if (outputCheck2 != null) { + booleanArrayOr(outputCheck2, outputCheck); + } + + if (containsFalse(inputCheck2) || containsFalse(outputCheck2)) { + return false; + } + } + + return true; + } + + protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges, + Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv, + boolean[] outputCheck) throws HiveException { + + boolean pass = true; + if (inputPriv != null) { + pass = pass && matchPrivs(inputPriv, privileges, inputCheck); + } + if (outputPriv != null) { + pass = pass && matchPrivs(outputPriv, privileges, outputCheck); + } + return pass; + } + + /** + * try to match an array of privileges from user/groups/roles grants. 
+ * + * @param container + */ + private boolean matchPrivs(Privilege[] inputPriv, + PrincipalPrivilegeSet privileges, boolean[] check) { + + if (inputPriv == null) + return true; + + /* + * user grants + */ + Set<String> privSet = new HashSet<String>(); + if (privileges.getUserPrivileges() != null + && privileges.getUserPrivileges().size() > 0) { + Collection<String> userPrivs = privileges.getUserPrivileges().values(); + if (userPrivs != null && userPrivs.size() > 0) { + for (String priv : userPrivs) { + if (priv == null || priv.trim().equals("")) + continue; + + String[] privs = priv.split(StringUtils.COMMA_STR); + for (String p : privs) { + if (p.equalsIgnoreCase(Privilege.ALL.getPriv())) { + setBooleanArray(check, true); + return true; + } + privSet.add(p.toLowerCase()); + } + } + } + } + + /* + * group grants + */ + if (privileges.getGroupPrivileges() != null + && privileges.getGroupPrivileges().size() > 0) { + Collection<String> groupPrivs = privileges.getGroupPrivileges().values(); + if (groupPrivs != null && groupPrivs.size() > 0) { + for (String priv : groupPrivs) { + if (priv == null || priv.trim().equals("")) + continue; + + String[] privs = priv.split(StringUtils.COMMA_STR); + for (String p : privs) { + if (p.equalsIgnoreCase(Privilege.ALL.getPriv())) { + setBooleanArray(check, true); + return true; + } + privSet.add(p.toLowerCase()); + } + } + } + } + + /* + * roles grants + */ + if (privileges.getRolePrivileges() != null + && privileges.getRolePrivileges().size() > 0) { + Collection<String> rolePrivs = privileges.getRolePrivileges().values(); + if (rolePrivs != null && rolePrivs.size() > 0) { + for (String priv : rolePrivs) { + if (priv == null || priv.trim().equals("")) + continue; + + String[] privs = priv.split(StringUtils.COMMA_STR); + for (String p : privs) { + if (p.equalsIgnoreCase(Privilege.ALL.getPriv())) { + setBooleanArray(check, true); + return true; + } + privSet.add(p.toLowerCase()); + } + } + } + } + + for (int i = 0; i < inputPriv.length; i++) { + String toMatch = inputPriv[i].getPriv(); + if (!check[i]) { + check[i] = 
privSet.contains(toMatch.toLowerCase()); + } + } + + return !containsFalse(check); + } + + private static void setBooleanArray(boolean[] check, boolean b) { + for (int i = 0; i < check.length; i++) { + check[i] = b; + } + } + + private static void booleanArrayOr(boolean[] output, boolean[] input) { + for (int i = 0; i < output.length && i < input.length; i++) { + output[i] = output[i] || input[i]; + } + } + + private boolean containsFalse(boolean[] inputCheck) { + if (inputCheck != null) { + System.out.println("BitSet's length is " + inputCheck.length); + + for (int i = 0; i < inputCheck.length; i++) { + if (!inputCheck[i]) { + return true; + } + } + } + return false; + } +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java (revision 0) @@ -0,0 +1,97 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import java.util.EnumSet; + +public class Privilege { + + private String priv; + + private EnumSet supportedScopeSet; + + private Privilege(String priv, EnumSet scopeSet) { + super(); + this.priv = priv; + this.supportedScopeSet = scopeSet; + } + + public Privilege(String priv) { + super(); + this.priv = priv; + + } + + public String getPriv() { + return priv; + } + + public void setPriv(String priv) { + this.priv = priv; + } + + public boolean supportColumnLevel() { + return supportedScopeSet != null + && supportedScopeSet.contains(PrivilegeScope.COLUMN_LEVEL_SCOPE); + } + + public boolean supportDBLevel() { + return supportedScopeSet != null + && supportedScopeSet.contains(PrivilegeScope.DB_LEVEL_SCOPE); + } + + public boolean supportTableLevel() { + return supportedScopeSet != null + && supportedScopeSet.contains(PrivilegeScope.TABLE_LEVEL_SCOPE); + } + + public Privilege() { + } + + public static Privilege ALL = new Privilege("All Privileges", + PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + + public static Privilege ALTER_METADATA = new Privilege("Alter Metadata", + PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + + public static Privilege ALTER_DATA = new Privilege("Alter Data", + PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + + public static Privilege CREATE = new Privilege("Create", + PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + + public static Privilege DROP = new Privilege("Drop", + PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + + public static Privilege INDEX = new Privilege("Index", + PrivilegeScope.ALLSCOPE); + + public static Privilege LOCK = new Privilege("Lock", + PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + + public static Privilege SELECT = new Privilege("Select", + PrivilegeScope.ALLSCOPE); + + public static Privilege SHOW_DATABASE = new Privilege("Show Database", + EnumSet.of(PrivilegeScope.USER_LEVEL_SCOPE)); + + public static Privilege SUPER = new Privilege("Super", + 
PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN); + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java (revision 0) @@ -0,0 +1,48 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import java.util.HashMap; +import java.util.Map; + +public class PrivilegeRegistry { + + protected static Map Registry = new HashMap(); + + static { + Registry.put(Privilege.ALL.getPriv().toLowerCase(), Privilege.ALL); + Registry.put(Privilege.ALTER_DATA.getPriv().toLowerCase(), + Privilege.ALTER_DATA); + Registry.put(Privilege.ALTER_METADATA.getPriv().toLowerCase(), + Privilege.ALTER_METADATA); + Registry.put(Privilege.CREATE.getPriv().toLowerCase(), Privilege.CREATE); + Registry.put(Privilege.DROP.getPriv().toLowerCase(), Privilege.DROP); + Registry.put(Privilege.INDEX.getPriv().toLowerCase(), Privilege.INDEX); + Registry.put(Privilege.LOCK.getPriv().toLowerCase(), Privilege.LOCK); + Registry.put(Privilege.SELECT.getPriv().toLowerCase(), Privilege.SELECT); + Registry.put(Privilege.SHOW_DATABASE.getPriv().toLowerCase(), + Privilege.SHOW_DATABASE); + Registry.put(Privilege.SUPER.getPriv().toLowerCase(), Privilege.SUPER); + } + + public static Privilege getPrivilege(String privilegeName) { + return Registry.get(privilegeName.toLowerCase()); + } + +} Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeScope.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeScope.java (revision 0) +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeScope.java (revision 0) @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import java.util.EnumSet; + +/** + * PrivilegeScope describes the scope of a Hive-defined privilege + * (user/database/table/column). For example, some Hive privileges are + * db-level only, some are user-level, and some are table only. + */ +public enum PrivilegeScope { + + USER_LEVEL_SCOPE((short) 0x01), + DB_LEVEL_SCOPE((short) 0x02), + TABLE_LEVEL_SCOPE((short) 0x04), + COLUMN_LEVEL_SCOPE((short) 0x08); + + private short mode; + + private PrivilegeScope(short mode) { + this.mode = mode; + } + + public short getMode() { + return mode; + } + + public void setMode(short mode) { + this.mode = mode; + } + + public static EnumSet ALLSCOPE = EnumSet.of( + PrivilegeScope.USER_LEVEL_SCOPE, PrivilegeScope.DB_LEVEL_SCOPE, + PrivilegeScope.TABLE_LEVEL_SCOPE, PrivilegeScope.COLUMN_LEVEL_SCOPE); + + public static EnumSet ALLSCOPE_EXCEPT_COLUMN = EnumSet.of( + PrivilegeScope.USER_LEVEL_SCOPE, PrivilegeScope.DB_LEVEL_SCOPE, + PrivilegeScope.TABLE_LEVEL_SCOPE); + +} Index: ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (revision 1030336) +++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (working copy) @@ -40,6 +40,12 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.history.HiveHistory; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import 
org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.apache.hadoop.hive.ql.security.Authenticator; +import org.apache.hadoop.hive.ql.security.AuthenticatorFactory; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationManagerFactory; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationProviderManager; import org.apache.hadoop.hive.ql.util.DosToUnix; import org.apache.log4j.LogManager; import org.apache.log4j.PropertyConfigurator; @@ -77,7 +83,11 @@ /** * type of the command. */ - private String commandType; + private HiveOperation commandType; + + private AuthorizationProviderManager authorizer; + + private Authenticator authenticator; /** * Lineage state. @@ -150,11 +160,15 @@ /** * start a new session and set it to current session. + * @throws HiveException */ - public static SessionState start(HiveConf conf) { + public static SessionState start(HiveConf conf) throws HiveException { SessionState ss = new SessionState(conf); ss.getConf().setVar(HiveConf.ConfVars.HIVESESSIONID, makeSessionId()); ss.hiveHist = new HiveHistory(ss); + ss.authenticator = AuthenticatorFactory.getAuthenticator(conf); + ss.authorizer = AuthorizationManagerFactory.getAuthorizeProviderManager( + conf, ss.authenticator); tss.set(ss); return (ss); } @@ -163,8 +177,9 @@ * set current session to existing session object if a thread is running * multiple sessions - it must call this method with the new session object * when switching from one session to another. 
+ * @throws HiveException */ - public static SessionState start(SessionState startSs) { + public static SessionState start(SessionState startSs) throws HiveException { tss.set(startSs); if (StringUtils.isEmpty(startSs.getConf().getVar( @@ -176,6 +191,12 @@ if (startSs.hiveHist == null) { startSs.hiveHist = new HiveHistory(startSs); } + + startSs.authenticator = AuthenticatorFactory.getAuthenticator(startSs + .getConf()); + startSs.authorizer = AuthorizationManagerFactory + .getAuthorizeProviderManager(startSs.getConf(), startSs.authenticator); + return startSs; } @@ -539,10 +560,30 @@ } public String getCommandType() { + return commandType.getOperationName(); + } + + public HiveOperation getHiveOperation() { return commandType; } - public void setCommandType(String commandType) { + public void setCommandType(HiveOperation commandType) { this.commandType = commandType; } + + public AuthorizationProviderManager getAuthorizer() { + return authorizer; + } + + public void setAuthorizer(AuthorizationProviderManager authorizer) { + this.authorizer = authorizer; + } + + public Authenticator getAuthenticator() { + return authenticator; + } + + public void setAuthenticator(Authenticator authenticator) { + this.authenticator = authenticator; + } }