diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java index 8520d8f..6fcdd29 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java @@ -57,7 +57,7 @@ private static final String partitionedColumnName = "partcolabc"; private static final String partitionedColumnValue = "20090619"; private static final String partitionedTableComment = "Partitioned table"; - private static final String dataTypeTableName = "testDataTypeTable"; + private static final String dataTypeTableName = "testdatatypetable"; private static final String dataTypeTableComment = "Table with many column data types"; private final HiveConf conf; private final Path dataFilePath; diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java index 4090e72..7176ff5 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java @@ -77,7 +77,7 @@ private static final String partitionedColumnName = "partcolabc"; private static final String partitionedColumnValue = "20090619"; private static final String partitionedTableComment = "Partitioned table"; - private static final String dataTypeTableName = "testDataTypeTable"; + private static final String dataTypeTableName = "testdatatypetable"; private static final String dataTypeTableComment = "Table with many column data types"; private final HiveConf conf; private final Path dataFilePath; diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 56e9f0c..a39eafe 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -3684,94 +3684,129 @@ public PrincipalType getPrincipalType(String principalType) { public List list_privileges(String principalName, PrincipalType principalType, HiveObjectRef hiveObject) throws MetaException, TException { + if (hiveObject.getObjectType() == null) { + return getAllPrivileges(principalName, principalType); + } if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) { - return this.list_global_privileges(principalName, principalType); - } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) { - return this.list_db_privileges(principalName, principalType, hiveObject + return list_global_privileges(principalName, principalType); + } + if (hiveObject.getObjectType() == HiveObjectType.DATABASE) { + return list_db_privileges(principalName, principalType, hiveObject .getDbName()); - } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) { - return this.list_table_privileges(principalName, principalType, + } + if (hiveObject.getObjectType() == HiveObjectType.TABLE) { + return list_table_privileges(principalName, principalType, hiveObject.getDbName(), hiveObject.getObjectName()); - } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) { - return this.list_partition_privileges(principalName, principalType, + } + if (hiveObject.getObjectType() == HiveObjectType.PARTITION) { + return list_partition_privileges(principalName, principalType, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject - 
.getPartValues()); - } else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) { - return this.list_column_privileges(principalName, principalType, + .getPartValues()); + } + if (hiveObject.getObjectType() == HiveObjectType.COLUMN) { + if (hiveObject.getPartValues() == null || hiveObject.getPartValues().isEmpty()) { + return list_table_column_privileges(principalName, principalType, + hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getColumnName()); + } + return list_partition_column_privileges(principalName, principalType, hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject - .getPartValues(), hiveObject.getColumnName()); + .getPartValues(), hiveObject.getColumnName()); } return null; } - public List list_column_privileges( + private List getAllPrivileges(String principalName, + PrincipalType principalType) throws TException { + List privs = new ArrayList(); + privs.addAll(list_global_privileges(principalName, principalType)); + privs.addAll(list_db_privileges(principalName, principalType, null)); + privs.addAll(list_table_privileges(principalName, principalType, null, null)); + privs.addAll(list_partition_privileges(principalName, principalType, null, null, null)); + privs.addAll(list_table_column_privileges(principalName, principalType, null, null, null)); + privs.addAll(list_partition_column_privileges(principalName, principalType, + null, null, null, null)); + return privs; + } + + public List list_table_column_privileges( + final String principalName, final PrincipalType principalType, + final String dbName, final String tableName, final String columnName) + throws MetaException, TException { + incrementCounter("list_table_column_privileges"); + + try { + if (dbName == null) { + return getMS().listPrincipalTableColumnGrantsAll(principalName, principalType); + } + if (principalName == null) { + return getMS().listTableColumnGrantsAll(dbName, tableName, columnName); + } + List mTableCols = getMS() + .listPrincipalTableColumnGrants(principalName, principalType, + dbName, tableName, columnName); + if (mTableCols.isEmpty()) { + return Collections.emptyList(); + } + List result = new ArrayList(); + for (int i = 0; i < mTableCols.size(); i++) { + MTableColumnPrivilege sCol = mTableCols.get(i); + HiveObjectRef objectRef = new HiveObjectRef( + HiveObjectType.COLUMN, dbName, tableName, null, sCol.getColumnName()); + HiveObjectPrivilege secObj = new HiveObjectPrivilege( + objectRef, sCol.getPrincipalName(), principalType, + new PrivilegeGrantInfo(sCol.getPrivilege(), sCol + .getCreateTime(), sCol.getGrantor(), PrincipalType + .valueOf(sCol.getGrantorType()), sCol + .getGrantOption())); + result.add(secObj); + } + return result; + } catch (MetaException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public List list_partition_column_privileges( final String principalName, final PrincipalType principalType, final String dbName, final String tableName, final List partValues, final String columnName) throws MetaException, TException { - incrementCounter("list_security_column_grant"); + incrementCounter("list_partition_column_privileges"); - List ret = null; try { - RawStore ms = getMS(); - String partName = null; - if (partValues != null && partValues.size() > 0) { - Table tbl = get_table(dbName, tableName); - partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); - } - - List result = Collections. 
emptyList(); - - if (partName != null) { - Partition part = null; - part = get_partition_by_name(dbName, tableName, partName); - List mPartitionCols = ms.listPrincipalPartitionColumnGrants( - principalName, - principalType, dbName, tableName, partName, columnName); - if (mPartitionCols.size() > 0) { - result = new ArrayList(); - for (int i = 0; i < mPartitionCols.size(); i++) { - MPartitionColumnPrivilege sCol = mPartitionCols.get(i); - HiveObjectRef objectRef = new HiveObjectRef( - HiveObjectType.COLUMN, dbName, tableName, - part == null ? null : part.getValues(), sCol - .getColumnName()); - HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, - sCol.getPrincipalName(), principalType, - new PrivilegeGrantInfo(sCol.getPrivilege(), sCol - .getCreateTime(), sCol.getGrantor(), PrincipalType - .valueOf(sCol.getGrantorType()), sCol.getGrantOption())); - result.add(secObj); - } - } - } else { - List mTableCols = ms - .listPrincipalTableColumnGrants(principalName, principalType, - dbName, tableName, columnName); - if (mTableCols.size() > 0) { - result = new ArrayList(); - for (int i = 0; i < mTableCols.size(); i++) { - MTableColumnPrivilege sCol = mTableCols.get(i); - HiveObjectRef objectRef = new HiveObjectRef( - HiveObjectType.COLUMN, dbName, tableName, null, sCol - .getColumnName()); - HiveObjectPrivilege secObj = new HiveObjectPrivilege( - objectRef, sCol.getPrincipalName(), principalType, - new PrivilegeGrantInfo(sCol.getPrivilege(), sCol - .getCreateTime(), sCol.getGrantor(), PrincipalType - .valueOf(sCol.getGrantorType()), sCol - .getGrantOption())); - result.add(secObj); - } - } + if (dbName == null) { + return getMS().listPrincipalPartitionColumnGrantsAll(principalName, principalType); } - - ret = result; + Table tbl = get_table(dbName, tableName); + String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); + if (principalName == null) { + return getMS().listPartitionColumnGrantsAll(dbName, tableName, partName, columnName); + } + List mPartitionCols = getMS().listPrincipalPartitionColumnGrants( + principalName, + principalType, dbName, tableName, partName, columnName); + if (mPartitionCols.isEmpty()) { + return Collections.emptyList(); + } + List result = new ArrayList(); + for (int i = 0; i < mPartitionCols.size(); i++) { + MPartitionColumnPrivilege sCol = mPartitionCols.get(i); + HiveObjectRef objectRef = new HiveObjectRef( + HiveObjectType.COLUMN, dbName, tableName, partValues, sCol.getColumnName()); + HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, + sCol.getPrincipalName(), principalType, + new PrivilegeGrantInfo(sCol.getPrivilege(), sCol + .getCreateTime(), sCol.getGrantor(), PrincipalType + .valueOf(sCol.getGrantorType()), sCol.getGrantOption())); + result.add(secObj); + } + return result; } catch (MetaException e) { throw e; } catch (Exception e) { throw new RuntimeException(e); } - return ret; } public List list_db_privileges(final String principalName, @@ -3780,25 +3815,30 @@ public PrincipalType getPrincipalType(String principalType) { incrementCounter("list_security_db_grant"); try { - RawStore ms = getMS(); - List mDbs = ms.listPrincipalDBGrants( - principalName, principalType, dbName); - if (mDbs.size() > 0) { - List result = new ArrayList(); - for (int i = 0; i < mDbs.size(); i++) { - MDBPrivilege sDB = mDbs.get(i); - HiveObjectRef objectRef = new HiveObjectRef( - HiveObjectType.DATABASE, dbName, null, null, null); - HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, - sDB.getPrincipalName(), principalType, - new 
PrivilegeGrantInfo(sDB.getPrivilege(), sDB - .getCreateTime(), sDB.getGrantor(), PrincipalType - .valueOf(sDB.getGrantorType()), sDB.getGrantOption())); - result.add(secObj); - } - return result; + if (dbName == null) { + return getMS().listPrincipalDBGrantsAll(principalName, principalType); + } + if (principalName == null) { + return getMS().listDBGrantsAll(dbName); } - return Collections. emptyList(); + List mDbs = getMS().listPrincipalDBGrants( + principalName, principalType, dbName); + if (mDbs.isEmpty()) { + return Collections.emptyList(); + } + List result = new ArrayList(); + for (int i = 0; i < mDbs.size(); i++) { + MDBPrivilege sDB = mDbs.get(i); + HiveObjectRef objectRef = new HiveObjectRef( + HiveObjectType.DATABASE, dbName, null, null, null); + HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, + sDB.getPrincipalName(), principalType, + new PrivilegeGrantInfo(sDB.getPrivilege(), sDB + .getCreateTime(), sDB.getGrantor(), PrincipalType + .valueOf(sDB.getGrantorType()), sDB.getGrantOption())); + result.add(secObj); + } + return result; } catch (MetaException e) { throw e; } catch (Exception e) { @@ -3813,30 +3853,34 @@ public PrincipalType getPrincipalType(String principalType) { incrementCounter("list_security_partition_grant"); try { - RawStore ms = getMS(); + if (dbName == null) { + return getMS().listPrincipalPartitionGrantsAll(principalName, principalType); + } Table tbl = get_table(dbName, tableName); String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues); - List mParts = ms.listPrincipalPartitionGrants( - principalName, principalType, dbName, tableName, partName); - if (mParts.size() > 0) { - List result = new ArrayList(); - for (int i = 0; i < mParts.size(); i++) { - MPartitionPrivilege sPart = mParts.get(i); - HiveObjectRef objectRef = new HiveObjectRef( - HiveObjectType.PARTITION, dbName, tableName, partValues, - null); - HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, - sPart.getPrincipalName(), principalType, - new PrivilegeGrantInfo(sPart.getPrivilege(), sPart - .getCreateTime(), sPart.getGrantor(), PrincipalType - .valueOf(sPart.getGrantorType()), sPart - .getGrantOption())); - - result.add(secObj); - } - return result; + if (principalName == null) { + return getMS().listPartitionGrantsAll(dbName, tableName, partName); } - return Collections. emptyList(); + List mParts = getMS().listPrincipalPartitionGrants( + principalName, principalType, dbName, tableName, partName); + if (mParts.isEmpty()) { + return Collections. 
emptyList(); + } + List result = new ArrayList(); + for (int i = 0; i < mParts.size(); i++) { + MPartitionPrivilege sPart = mParts.get(i); + HiveObjectRef objectRef = new HiveObjectRef( + HiveObjectType.PARTITION, dbName, tableName, partValues, null); + HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, + sPart.getPrincipalName(), principalType, + new PrivilegeGrantInfo(sPart.getPrivilege(), sPart + .getCreateTime(), sPart.getGrantor(), PrincipalType + .valueOf(sPart.getGrantorType()), sPart + .getGrantOption())); + + result.add(secObj); + } + return result; } catch (MetaException e) { throw e; } catch (Exception e) { @@ -3851,24 +3895,30 @@ public PrincipalType getPrincipalType(String principalType) { incrementCounter("list_security_table_grant"); try { + if (dbName == null) { + return getMS().listPrincipalTableGrantsAll(principalName, principalType); + } + if (principalName == null) { + return getMS().listTableGrantsAll(dbName, tableName); + } List mTbls = getMS() .listAllTableGrants(principalName, principalType, dbName, tableName); - if (mTbls.size() > 0) { - List result = new ArrayList(); - for (int i = 0; i < mTbls.size(); i++) { - MTablePrivilege sTbl = mTbls.get(i); - HiveObjectRef objectRef = new HiveObjectRef( - HiveObjectType.TABLE, dbName, tableName, null, null); - HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, - sTbl.getPrincipalName(), principalType, - new PrivilegeGrantInfo(sTbl.getPrivilege(), sTbl.getCreateTime(), sTbl - .getGrantor(), PrincipalType.valueOf(sTbl - .getGrantorType()), sTbl.getGrantOption())); - result.add(secObj); - } - return result; - } - return Collections. emptyList(); + if (mTbls.isEmpty()) { + return Collections. emptyList(); + } + List result = new ArrayList(); + for (int i = 0; i < mTbls.size(); i++) { + MTablePrivilege sTbl = mTbls.get(i); + HiveObjectRef objectRef = new HiveObjectRef( + HiveObjectType.TABLE, dbName, tableName, null, null); + HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef, + sTbl.getPrincipalName(), principalType, + new PrivilegeGrantInfo(sTbl.getPrivilege(), sTbl.getCreateTime(), sTbl + .getGrantor(), PrincipalType.valueOf(sTbl + .getGrantorType()), sTbl.getGrantOption())); + result.add(secObj); + } + return result; } catch (MetaException e) { throw e; } catch (Exception e) { @@ -3882,24 +3932,27 @@ public PrincipalType getPrincipalType(String principalType) { incrementCounter("list_security_user_grant"); try { + if (principalName == null) { + return getMS().listGlobalGrantsAll(); + } List mUsers = getMS().listPrincipalGlobalGrants( principalName, principalType); - if (mUsers.size() > 0) { - List result = new ArrayList(); - for (int i = 0; i < mUsers.size(); i++) { - MGlobalPrivilege sUsr = mUsers.get(i); - HiveObjectRef objectRef = new HiveObjectRef( - HiveObjectType.GLOBAL, null, null, null, null); - HiveObjectPrivilege secUser = new HiveObjectPrivilege( - objectRef, sUsr.getPrincipalName(), principalType, - new PrivilegeGrantInfo(sUsr.getPrivilege(), sUsr - .getCreateTime(), sUsr.getGrantor(), PrincipalType - .valueOf(sUsr.getGrantorType()), sUsr.getGrantOption())); - result.add(secUser); - } - return result; - } - return Collections. emptyList(); + if (mUsers.isEmpty()) { + return Collections. 
emptyList(); + } + List result = new ArrayList(); + for (int i = 0; i < mUsers.size(); i++) { + MGlobalPrivilege sUsr = mUsers.get(i); + HiveObjectRef objectRef = new HiveObjectRef( + HiveObjectType.GLOBAL, null, null, null, null); + HiveObjectPrivilege secUser = new HiveObjectPrivilege( + objectRef, sUsr.getPrincipalName(), principalType, + new PrivilegeGrantInfo(sUsr.getPrivilege(), sUsr + .getCreateTime(), sUsr.getGrantor(), PrincipalType + .valueOf(sUsr.getGrantorType()), sUsr.getGrantOption())); + result.add(secUser); + } + return result; } catch (MetaException e) { throw e; } catch (Exception e) { diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index 0a52b4d..bf12cf2 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -3863,6 +3863,38 @@ public boolean revokePrivileges(PrivilegeBag privileges) return userNameDbPriv; } + @Override + public List listGlobalGrantsAll() { + boolean commited = false; + try { + openTransaction(); + Query query = pm.newQuery(MGlobalPrivilege.class); + List userNameDbPriv = (List) query.execute(); + pm.retrieveAll(userNameDbPriv); + commited = commitTransaction(); + return convertGlobal(userNameDbPriv); + } finally { + if (!commited) { + rollbackTransaction(); + } + } + } + + private List convertGlobal(List privs) { + List result = new ArrayList(); + for (MGlobalPrivilege priv : privs) { + String pname = priv.getPrincipalName(); + PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); + + HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null); + PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), + priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); + + result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); + } + return result; + } + @SuppressWarnings("unchecked") @Override public List listPrincipalDBGrants(String principalName, @@ -3891,6 +3923,34 @@ public boolean revokePrivileges(PrivilegeBag privileges) return mSecurityDBList; } + @Override + public List listPrincipalDBGrantsAll( + String principalName, PrincipalType principalType) { + return convertDB(listPrincipalAllDBGrant(principalName, principalType)); + } + + @Override + public List listDBGrantsAll(String dbName) { + return convertDB(listDatabaseGrants(dbName)); + } + + private List convertDB(List privs) { + List result = new ArrayList(); + for (MDBPrivilege priv : privs) { + String pname = priv.getPrincipalName(); + PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); + String database = priv.getDatabase().getName(); + + HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.DATABASE, database, + null, null, null); + PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), + priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); + + result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); + } + return result; + } + @SuppressWarnings("unchecked") private List listPrincipalAllDBGrant( String principalName, PrincipalType principalType) { @@ -4257,6 +4317,78 @@ public boolean revokePrivileges(PrivilegeBag privileges) return mSecurityColList; } + @Override + public List listPrincipalPartitionColumnGrantsAll( + String principalName, PrincipalType 
principalType) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPrincipalPartitionColumnGrantsAll"); + Query query = pm.newQuery(MPartitionColumnPrivilege.class, + "principalName == t1 && principalType == t2"); + query.declareParameters("java.lang.String t1, java.lang.String t2"); + List mSecurityTabPartList = (List) + query.executeWithArray(principalName, principalType.toString()); + LOG.debug("Done executing query for listPrincipalPartitionColumnGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertPartCols(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalPartitionColumnGrantsAll"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + @Override + public List listPartitionColumnGrantsAll( + String dbName, String tableName, String partitionName, String columnName) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPartitionColumnGrantsAll"); + Query query = pm.newQuery(MPartitionColumnPrivilege.class, + "partition.table.tableName == t3 && partition.table.database.name == t4 && " + + "partition.partitionName == t5 && columnName == t6"); + query.declareParameters( + "java.lang.String t3, java.lang.String t4, java.lang.String t5, java.lang.String t6"); + List mSecurityTabPartList = (List) + query.executeWithArray(tableName, dbName, partitionName, columnName); + LOG.debug("Done executing query for listPartitionColumnGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertPartCols(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPartitionColumnGrantsAll"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + private List convertPartCols(List privs) { + List result = new ArrayList(); + for (MPartitionColumnPrivilege priv : privs) { + String pname = priv.getPrincipalName(); + PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); + + MPartition mpartition = priv.getPartition(); + MTable mtable = mpartition.getTable(); + MDatabase mdatabase = mtable.getDatabase(); + + HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN, + mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), priv.getColumnName()); + PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), + priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); + + result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); + } + return result; + } + @SuppressWarnings("unchecked") private List listPrincipalAllTableGrants( String principalName, PrincipalType principalType) { @@ -4284,6 +4416,74 @@ public boolean revokePrivileges(PrivilegeBag privileges) return mSecurityTabPartList; } + @Override + public List listPrincipalTableGrantsAll( + String principalName, PrincipalType principalType) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPrincipalAllTableGrants"); + Query query = pm.newQuery(MTablePrivilege.class, + "principalName == t1 && principalType == t2"); + query.declareParameters("java.lang.String t1, java.lang.String t2"); + List mSecurityTabPartList = (List) query.execute( + principalName, principalType.toString()); + LOG.debug("Done executing query for listPrincipalAllTableGrants"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertTable(mSecurityTabPartList); + 
success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + @Override + public List listTableGrantsAll(String dbName, String tableName) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listTableGrantsAll"); + Query query = pm.newQuery(MTablePrivilege.class, + "table.tableName == t1 && table.database.name == t2"); + query.declareParameters("java.lang.String t1, java.lang.String t2"); + List mSecurityTabPartList = (List) + query.executeWithArray(tableName, dbName); + LOG.debug("Done executing query for listTableGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertTable(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + private List convertTable(List privs) { + List result = new ArrayList(); + for (MTablePrivilege priv : privs) { + String pname = priv.getPrincipalName(); + PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); + + String table = priv.getTable().getTableName(); + String database = priv.getTable().getDatabase().getName(); + + HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.TABLE, database, table, + null, null); + PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), + priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); + + result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); + } + return result; + } + @SuppressWarnings("unchecked") private List listPrincipalAllPartitionGrants( String principalName, PrincipalType principalType) { @@ -4311,6 +4511,77 @@ public boolean revokePrivileges(PrivilegeBag privileges) return mSecurityTabPartList; } + @Override + public List listPrincipalPartitionGrantsAll( + String principalName, PrincipalType principalType) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPrincipalPartitionGrantsAll"); + Query query = pm.newQuery(MPartitionPrivilege.class, + "principalName == t1 && principalType == t2"); + query.declareParameters("java.lang.String t1, java.lang.String t2"); + List mSecurityTabPartList = (List) + query.execute(principalName, principalType.toString()); + LOG.debug("Done executing query for listPrincipalPartitionGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertPartition(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + @Override + public List listPartitionGrantsAll( + String dbName, String tableName, String partitionName) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPrincipalPartitionGrantsAll"); + Query query = pm.newQuery(MPartitionPrivilege.class, + "partition.table.tableName == t3 && partition.table.database.name == t4 && " + + "partition.partitionName == t5"); + query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5"); + List mSecurityTabPartList = (List) + query.executeWithArray(tableName, dbName, partitionName); + LOG.debug("Done executing query for listPrincipalPartitionGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = 
convertPartition(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + private List convertPartition(List privs) { + List result = new ArrayList(); + for (MPartitionPrivilege priv : privs) { + String pname = priv.getPrincipalName(); + PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); + + MPartition mpartition = priv.getPartition(); + MTable mtable = mpartition.getTable(); + MDatabase mdatabase = mtable.getDatabase(); + + HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.PARTITION, + mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), null); + PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), + priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); + + result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); + } + return result; + } + @SuppressWarnings("unchecked") private List listPrincipalAllTableColumnGrants( String principalName, PrincipalType principalType) { @@ -4337,6 +4608,75 @@ public boolean revokePrivileges(PrivilegeBag privileges) return mSecurityColumnList; } + @Override + public List listPrincipalTableColumnGrantsAll( + String principalName, PrincipalType principalType) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPrincipalTableColumnGrantsAll"); + Query query = pm.newQuery(MTableColumnPrivilege.class, + "principalName == t1 && principalType == t2"); + query.declareParameters("java.lang.String t1, java.lang.String t2"); + List mSecurityTabPartList = (List) + query.execute(principalName, principalType.toString()); + LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertTableCols(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + @Override + public List listTableColumnGrantsAll( + String dbName, String tableName, String columnName) { + boolean success = false; + try { + openTransaction(); + LOG.debug("Executing listPrincipalTableColumnGrantsAll"); + Query query = pm.newQuery(MTableColumnPrivilege.class, + "table.tableName == t3 && table.database.name == t4 && columnName == t5"); + query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5"); + List mSecurityTabPartList = (List) + query.executeWithArray(tableName, dbName, columnName); + LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll"); + pm.retrieveAll(mSecurityTabPartList); + List result = convertTableCols(mSecurityTabPartList); + success = commitTransaction(); + LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll"); + return result; + } finally { + if (!success) { + rollbackTransaction(); + } + } + } + + private List convertTableCols(List privs) { + List result = new ArrayList(); + for (MTableColumnPrivilege priv : privs) { + String pname = priv.getPrincipalName(); + PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType()); + + MTable mtable = priv.getTable(); + MDatabase mdatabase = mtable.getDatabase(); + + HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN, + mdatabase.getName(), mtable.getTableName(), null, 
priv.getColumnName()); + PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(), + priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption()); + + result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor)); + } + return result; + } + @SuppressWarnings("unchecked") private List listPrincipalAllPartitionColumnGrants( String principalName, PrincipalType principalType) { diff --git metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java index 27ae3c4..36ed864 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java @@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -437,28 +438,58 @@ public abstract boolean deleteTableColumnStatistics(String dbName, String tableN String colName) throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException; - public abstract long cleanupEvents(); + public abstract long cleanupEvents(); - public abstract boolean addToken(String tokenIdentifier, String delegationToken); + public abstract boolean addToken(String tokenIdentifier, String delegationToken); - public abstract boolean removeToken(String tokenIdentifier); + public abstract boolean removeToken(String tokenIdentifier); - public abstract String getToken(String tokenIdentifier); + public abstract String getToken(String tokenIdentifier); - public abstract List getAllTokenIdentifiers(); + public abstract List getAllTokenIdentifiers(); - public abstract int addMasterKey(String key) throws MetaException; + public abstract int addMasterKey(String key) throws MetaException; - public abstract void updateMasterKey(Integer seqNo, String key) + public abstract void updateMasterKey(Integer seqNo, String key) throws NoSuchObjectException, MetaException; - public abstract boolean removeMasterKey(Integer keySeq); + public abstract boolean removeMasterKey(Integer keySeq); - public abstract String[] getMasterKeys(); + public abstract String[] getMasterKeys(); - public abstract void verifySchema() throws MetaException; + public abstract void verifySchema() throws MetaException; - public abstract String getMetaStoreSchemaVersion() throws MetaException; + public abstract String getMetaStoreSchemaVersion() throws MetaException; - public abstract void setMetaStoreSchemaVersion(String version, String comment) throws MetaException; + public abstract void setMetaStoreSchemaVersion(String version, String comment) throws MetaException; + + List listPrincipalDBGrantsAll( + String principalName, PrincipalType principalType); + + List listPrincipalTableGrantsAll( + String principalName, PrincipalType principalType); + + List listPrincipalPartitionGrantsAll( + String principalName, PrincipalType principalType); + + List listPrincipalTableColumnGrantsAll( + String principalName, PrincipalType principalType); + + List listPrincipalPartitionColumnGrantsAll( + String principalName, PrincipalType principalType); + + List listGlobalGrantsAll(); + + List listDBGrantsAll(String dbName); + + List 
listPartitionColumnGrantsAll( + String dbName, String tableName, String partitionName, String columnName); + + List listTableGrantsAll(String dbName, String tableName); + + List listPartitionGrantsAll( + String dbName, String tableName, String partitionName); + + List listTableColumnGrantsAll( + String dbName, String tableName, String columnName); } diff --git metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java new file mode 100644 index 0000000..42062b1 --- /dev/null +++ metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java @@ -0,0 +1,59 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.metastore.model; + +public class MPrincipalDesc { + + private String name; + private String type; + + public MPrincipalDesc() {} + + public MPrincipalDesc(String name, String type) { + this.name = name; + this.type = type; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + @Override + public int hashCode() { + return type.hashCode() + name.hashCode(); + } + + @Override + public boolean equals(Object object) { + MPrincipalDesc another = (MPrincipalDesc) object; + return type.equals(another.type) && name.equals(another.name); + } +} diff --git metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java index 57f1e67..e6f60bd 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -493,6 +494,69 @@ public long cleanupEvents() { } @Override + public List listPrincipalDBGrantsAll( + String principalName, PrincipalType principalType) { + return objectStore.listPrincipalDBGrantsAll(principalName, principalType); + } + + @Override + public List listPrincipalTableGrantsAll( + String principalName, PrincipalType principalType) { + return objectStore.listPrincipalTableGrantsAll(principalName, principalType); + } + + @Override + public List 
listPrincipalPartitionGrantsAll( + String principalName, PrincipalType principalType) { + return objectStore.listPrincipalPartitionGrantsAll(principalName, principalType); + } + + @Override + public List listPrincipalTableColumnGrantsAll( + String principalName, PrincipalType principalType) { + return objectStore.listPrincipalTableColumnGrantsAll(principalName, principalType); + } + + @Override + public List listPrincipalPartitionColumnGrantsAll( + String principalName, PrincipalType principalType) { + return objectStore.listPrincipalPartitionColumnGrantsAll(principalName, principalType); + } + + @Override + public List listGlobalGrantsAll() { + return objectStore.listGlobalGrantsAll(); + } + + @Override + public List listDBGrantsAll(String dbName) { + return objectStore.listDBGrantsAll(dbName); + } + + @Override + public List listPartitionColumnGrantsAll(String dbName, String tableName, + String partitionName, String columnName) { + return objectStore.listPartitionColumnGrantsAll(dbName, tableName, partitionName, columnName); + } + + @Override + public List listTableGrantsAll(String dbName, String tableName) { + return objectStore.listTableGrantsAll(dbName, tableName); + } + + @Override + public List listPartitionGrantsAll(String dbName, String tableName, + String partitionName) { + return objectStore.listPartitionGrantsAll(dbName, tableName, partitionName); + } + + @Override + public List listTableColumnGrantsAll(String dbName, String tableName, + String columnName) { + return objectStore.listTableColumnGrantsAll(dbName, tableName, columnName); + } + + @Override public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, String colName) throws MetaException, NoSuchObjectException, InvalidInputException { diff --git metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java index c0e720f..0b50ef2 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.ColumnStatistics; import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.metastore.api.InvalidInputException; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; @@ -562,6 +563,66 @@ public boolean removeMasterKey(Integer keySeq) { } @Override + public List listPrincipalDBGrantsAll( + String principalName, PrincipalType principalType) { + return null; + } + + @Override + public List listPrincipalTableGrantsAll( + String principalName, PrincipalType principalType) { + return null; + } + + @Override + public List listPrincipalPartitionGrantsAll( + String principalName, PrincipalType principalType) { + return null; + } + + @Override + public List listPrincipalTableColumnGrantsAll( + String principalName, PrincipalType principalType) { + return null; + } + + @Override + public List listPrincipalPartitionColumnGrantsAll( + String principalName, PrincipalType principalType) { + return null; + } + + @Override + public List listGlobalGrantsAll() { + return null; + } + + @Override + public List listDBGrantsAll(String dbName) { + return null; + } + + @Override + public List listPartitionColumnGrantsAll(String dbName, 
String tableName, String partitionName, String columnName) { + return null; + } + + @Override + public List listTableGrantsAll(String dbName, String tableName) { + return null; + } + + @Override + public List listPartitionGrantsAll(String dbName, String tableName, String partitionName) { + return null; + } + + @Override + public List listTableColumnGrantsAll(String dbName, String tableName, String columnName) { + return null; + } + + @Override public ColumnStatistics getTableColumnStatistics(String dbName, String tableName, String colName) throws MetaException, NoSuchObjectException { return null; diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 617bba8..5847629 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -507,17 +507,19 @@ private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL) } private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException { - StringBuilder builder = new StringBuilder(); + + PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc(); + PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj(); + String principalName = principalDesc == null ? null : principalDesc.getName(); + PrincipalType type = principalDesc == null ? null : principalDesc.getType(); + + List privs = new ArrayList(); try { - PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc(); - PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj(); - String principalName = principalDesc.getName(); if (hiveObjectDesc == null) { - List users = db.showPrivilegeGrant( - HiveObjectType.GLOBAL, principalName, principalDesc.getType(), - null, null, null, null); - writeGrantInfo(builder, principalDesc.getType(), principalName, - null, null, null, null, users); + privs.addAll(db.showPrivilegeGrant(HiveObjectType.GLOBAL, principalName, type, + null, null, null, null)); + } else if (hiveObjectDesc != null && hiveObjectDesc.getObject() == null) { + privs.addAll(db.showPrivilegeGrant(null, principalName, type, null, null, null, null)); } else { String obj = hiveObjectDesc.getObject(); boolean notFound = true; @@ -557,40 +559,31 @@ private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException { if (!hiveObjectDesc.getTable()) { // show database level privileges - List dbs = db.showPrivilegeGrant(HiveObjectType.DATABASE, principalName, - principalDesc.getType(), dbName, null, null, null); - writeGrantInfo(builder, principalDesc.getType(), principalName, - dbName, null, null, null, dbs); + privs.addAll(db.showPrivilegeGrant(HiveObjectType.DATABASE, + principalName, type, dbName, null, null, null)); } else { if (showGrantDesc.getColumns() != null) { // show column level privileges for (String columnName : showGrantDesc.getColumns()) { - List columnss = db.showPrivilegeGrant( + privs.addAll(db.showPrivilegeGrant( HiveObjectType.COLUMN, principalName, - principalDesc.getType(), dbName, tableName, partValues, - columnName); - writeGrantInfo(builder, principalDesc.getType(), - principalName, dbName, tableName, partName, columnName, - columnss); + type, dbName, tableName, partValues, + columnName)); } } else if (hiveObjectDesc.getPartSpec() != null) { // show partition level privileges - List parts = db.showPrivilegeGrant( - HiveObjectType.PARTITION, principalName, principalDesc - .getType(), dbName, tableName, partValues, null); - writeGrantInfo(builder, principalDesc.getType(), - principalName, dbName, 
tableName, partName, null, parts); + privs.addAll(db.showPrivilegeGrant( + HiveObjectType.PARTITION, principalName, type, + dbName, tableName, partValues, null)); } else { // show table level privileges - List tbls = db.showPrivilegeGrant( - HiveObjectType.TABLE, principalName, principalDesc.getType(), - dbName, tableName, null, null); - writeGrantInfo(builder, principalDesc.getType(), - principalName, dbName, tableName, null, null, tbls); + privs.addAll(db.showPrivilegeGrant( + HiveObjectType.TABLE, principalName, type, + dbName, tableName, null, null)); } } } - writeToFile(builder.toString(), showGrantDesc.getResFile()); + writeToFile(writeGrantInfo(privs), showGrantDesc.getResFile()); } catch (FileNotFoundException e) { LOG.info("show table status: " + stringifyException(e)); return 1; @@ -2913,48 +2906,55 @@ private static void fixDecimalColumnTypeName(List cols) { } } - public static void writeGrantInfo(StringBuilder builder, - PrincipalType principalType, String principalName, String dbName, - String tableName, String partName, String columnName, - List privileges) throws IOException { + static String writeGrantInfo(List privileges) { if (privileges == null || privileges.isEmpty()) { - return; + return ""; } - sortPrivileges(privileges); - + StringBuilder builder = new StringBuilder(); for (HiveObjectPrivilege privilege : privileges) { PrivilegeGrantInfo grantInfo = privilege.getGrantInfo(); + HiveObjectRef resource = privilege.getHiveObject(); String privName = grantInfo.getPrivilege(); long unixTimestamp = grantInfo.getCreateTime() * 1000L; Date createTime = new Date(unixTimestamp); String grantor = grantInfo.getGrantor(); - if (dbName != null) { - writeKeyValuePair(builder, "database", dbName); - } - if (tableName != null) { - writeKeyValuePair(builder, "table", tableName); - } - if (partName != null) { - writeKeyValuePair(builder, "partition", partName); - } - if (columnName != null) { - writeKeyValuePair(builder, "columnName", columnName); + switch (resource.getObjectType()) { + case DATABASE: + writeKeyValuePair(builder, "database", resource.getDbName()); + break; + case TABLE: + writeKeyValuePair(builder, "database", resource.getDbName()); + writeKeyValuePair(builder, "table", resource.getObjectName()); + break; + case PARTITION: + writeKeyValuePair(builder, "database", resource.getDbName()); + writeKeyValuePair(builder, "table", resource.getObjectName()); + writeKeyValuePair(builder, "partition", String.valueOf(resource.getPartValues())); + break; + case COLUMN: + writeKeyValuePair(builder, "database", resource.getDbName()); + writeKeyValuePair(builder, "table", resource.getObjectName()); + if (resource.getPartValues() != null && !resource.getPartValues().isEmpty()) { + writeKeyValuePair(builder, "partition", String.valueOf(resource.getPartValues())); + } + writeKeyValuePair(builder, "columnName", resource.getColumnName()); + break; } - writeKeyValuePair(builder, "principalName", principalName); - writeKeyValuePair(builder, "principalType", "" + principalType); + writeKeyValuePair(builder, "principalName", privilege.getPrincipalName()); + writeKeyValuePair(builder, "principalType", "" + privilege.getPrincipalType()); writeKeyValuePair(builder, "privilege", privName); writeKeyValuePair(builder, "grantTime", "" + createTime); if (grantor != null) { writeKeyValuePair(builder, "grantor", grantor); } } + return builder.toString(); } - private static void writeKeyValuePair(StringBuilder builder, String key, - String value) throws IOException { + private static void 
writeKeyValuePair(StringBuilder builder, String key, String value) { if (builder.length() > 0) { builder.append((char)terminator); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g index 1ce6bf3..b5d610c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g +++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g @@ -260,6 +260,7 @@ TOK_PRINCIPAL_NAME; TOK_USER; TOK_GROUP; TOK_ROLE; +TOK_RESOURCE_ALL; TOK_GRANT_WITH_OPTION; TOK_GRANT_WITH_ADMIN_OPTION; TOK_PRIV_ALL; @@ -1353,14 +1354,15 @@ showRoles showGrants @init {msgs.push("show grants");} @after {msgs.pop();} - : KW_SHOW KW_GRANT principalName privilegeIncludeColObject? - -> ^(TOK_SHOW_GRANT principalName privilegeIncludeColObject?) + : KW_SHOW KW_GRANT principalName? (KW_ON privilegeIncludeColObject)? + -> ^(TOK_SHOW_GRANT principalName? privilegeIncludeColObject?) ; privilegeIncludeColObject @init {msgs.push("privilege object including columns");} @after {msgs.pop();} - : KW_ON privObjectType identifier (LPAREN cols=columnNameList RPAREN)? partitionSpec? + : KW_ALL -> ^(TOK_RESOURCE_ALL) + | privObjectType identifier (LPAREN cols=columnNameList RPAREN)? partitionSpec? -> ^(TOK_PRIV_OBJECT_COL identifier privObjectType $cols? partitionSpec?) ; diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index c41cd0f..39ebf16 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -154,59 +154,74 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { @Override public Task createShowGrantTask(ASTNode ast, Path resultFile, HashSet inputs, HashSet outputs) throws SemanticException { + + PrincipalDesc principalDesc = null; PrivilegeObjectDesc privHiveObj = null; + List cols = null; - ASTNode principal = (ASTNode) ast.getChild(0); - PrincipalType type = PrincipalType.USER; - switch (principal.getType()) { - case HiveParser.TOK_USER: - type = PrincipalType.USER; - break; - case HiveParser.TOK_GROUP: - type = PrincipalType.GROUP; - break; - case HiveParser.TOK_ROLE: - type = PrincipalType.ROLE; - break; + ASTNode param = null; + if (ast.getChildCount() > 0) { + param = (ASTNode) ast.getChild(0); + principalDesc = getPrincipalDesc(param); + if (principalDesc != null) { + param = (ASTNode) ast.getChild(1); // shift one + } } - String principalName = BaseSemanticAnalyzer.unescapeIdentifier(principal.getChild(0).getText()); - PrincipalDesc principalDesc = new PrincipalDesc(principalName, type); - List cols = null; - if (ast.getChildCount() > 1) { - ASTNode child = (ASTNode) ast.getChild(1); - if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) { + if (param != null) { + if (param.getType() == HiveParser.TOK_RESOURCE_ALL) { + privHiveObj = new PrivilegeObjectDesc(); + } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) { privHiveObj = new PrivilegeObjectDesc(); //set object name - privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText())); + String text = param.getChild(0).getText(); + privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(text)); //set object type - ASTNode objTypeNode = (ASTNode) child.getChild(1); + ASTNode objTypeNode = (ASTNode) param.getChild(1); 
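      // For illustration only (these comment lines and the identifiers in them are not part of
      // the patch): once the HiveParser.g change above makes both the principal and the ON clause
      // optional, this factory has to handle ASTs for all of the following statement forms:
      //   SHOW GRANT USER some_user                   -- no resource: global grants only
      //   SHOW GRANT USER some_user ON TABLE some_tbl -- TOK_PRIV_OBJECT_COL resource
      //   SHOW GRANT USER some_user ON ALL            -- TOK_RESOURCE_ALL: grants on every object type
      //   SHOW GRANT ON TABLE some_tbl                -- no principal: grants held by any principal
      // A missing principal leaves principalDesc null, and ON ALL produces the empty
      // PrivilegeObjectDesc created above; DDLTask.showGrants dispatches on those two nulls.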
privHiveObj.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE); //set col and partition spec if specified - if (child.getChildCount() > 2) { - for (int i = 2; i < child.getChildCount(); i++) { - ASTNode grandChild = (ASTNode) child.getChild(i); - if (grandChild.getToken().getType() == HiveParser.TOK_PARTSPEC) { - privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(grandChild)); - } else if (grandChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) { - cols = BaseSemanticAnalyzer.getColumnNames((ASTNode) grandChild); - } + for (int i = 2; i < param.getChildCount(); i++) { + ASTNode partOrCol = (ASTNode) param.getChild(i); + if (partOrCol.getType() == HiveParser.TOK_PARTSPEC) { + privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partOrCol)); + } else if (partOrCol.getType() == HiveParser.TOK_TABCOLNAME) { + cols = BaseSemanticAnalyzer.getColumnNames(partOrCol); + } else { + throw new SemanticException("Invalid token type " + partOrCol.getType()); } } } } - if (privHiveObj == null && cols != null) { - throw new SemanticException( - "For user-level privileges, column sets should be null. columns=" - + cols.toString()); - } - ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(), principalDesc, privHiveObj, cols); return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf); } + + private PrincipalDesc getPrincipalDesc(ASTNode principal) { + PrincipalType type = getPrincipalType(principal); + if (type != null) { + String text = principal.getChild(0).getText(); + String principalName = BaseSemanticAnalyzer.unescapeIdentifier(text); + return new PrincipalDesc(principalName, type); + } + return null; + } + + private PrincipalType getPrincipalType(ASTNode principal) { + switch (principal.getType()) { + case HiveParser.TOK_USER: + return PrincipalType.USER; + case HiveParser.TOK_GROUP: + return PrincipalType.GROUP; + case HiveParser.TOK_ROLE: + return PrincipalType.ROLE; + default: + return null; + } + } + @Override public Task createRevokeRoleTask(ASTNode ast, HashSet inputs, HashSet outputs) { diff --git ql/src/test/queries/clientpositive/authorization_9.q ql/src/test/queries/clientpositive/authorization_9.q new file mode 100644 index 0000000..adfb3d5 --- /dev/null +++ ql/src/test/queries/clientpositive/authorization_9.q @@ -0,0 +1,11 @@ +-- SORT_BEFORE_DIFF + +create table dummy (key string, value string); + +grant select on database default to user hive_test_user; +grant select on table dummy to user hive_test_user; +grant select (key, value) on table dummy to user hive_test_user; + +show grant user hive_test_user on database default; +show grant user hive_test_user on table dummy; +show grant user hive_test_user on all; diff --git ql/src/test/results/clientnegative/authorization_fail_4.q.out ql/src/test/results/clientnegative/authorization_fail_4.q.out index db788d8..91ecbe9 100644 --- ql/src/test/results/clientnegative/authorization_fail_4.q.out +++ ql/src/test/results/clientnegative/authorization_fail_4.q.out @@ -58,7 +58,7 @@ POSTHOOK: query: show grant user hive_test_user on table authorization_fail_4 pa POSTHOOK: type: SHOW_GRANT database default table authorization_fail_4 -partition ds=2010 +partition [2010] principalName hive_test_user principalType USER privilege Alter @@ -66,7 +66,7 @@ privilege Alter grantor hive_test_user database default table authorization_fail_4 -partition ds=2010 +partition [2010] principalName hive_test_user principalType USER privilege Create diff --git 
index e492a0a..7c70c66 100644
--- ql/src/test/results/clientnegative/authorization_fail_5.q.out
+++ ql/src/test/results/clientnegative/authorization_fail_5.q.out
@@ -71,7 +71,7 @@ POSTHOOK: query: show grant user hive_test_user on table authorization_fail part
 POSTHOOK: type: SHOW_GRANT
 database default
 table authorization_fail
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Alter
@@ -79,7 +79,7 @@ privilege Alter
 grantor hive_test_user
 database default
 table authorization_fail
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
@@ -87,7 +87,7 @@ privilege Create
 grantor hive_test_user
 database default
 table authorization_fail
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Select
@@ -105,7 +105,7 @@ POSTHOOK: query: show grant user hive_test_user on table authorization_fail part
 POSTHOOK: type: SHOW_GRANT
 database default
 table authorization_fail
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Alter
@@ -113,7 +113,7 @@ privilege Alter
 grantor hive_test_user
 database default
 table authorization_fail
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
diff --git ql/src/test/results/clientnegative/authorization_part.q.out ql/src/test/results/clientnegative/authorization_part.q.out
index 7a4bb8c..08866b4 100644
--- ql/src/test/results/clientnegative/authorization_part.q.out
+++ ql/src/test/results/clientnegative/authorization_part.q.out
@@ -91,7 +91,7 @@ POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).key EXPRESSION [(s
 POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part_fail
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_group1
 principalType GROUP
@@ -106,7 +106,7 @@ POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).key EXPRESSION [(s
 POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part_fail
-partition ds=2010
+partition [2010]
 principalName hive_test_group1
 principalType GROUP
 privilege Select
@@ -166,7 +166,7 @@ POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).key EXPRESSION [(s
 POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part_fail
-partition ds=2011
+partition [2011]
 columnName key
 principalName hive_test_group1
 principalType GROUP
@@ -183,7 +183,7 @@ POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).key EXPRESSION [(s
 POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part_fail
-partition ds=2011
+partition [2011]
 principalName hive_test_group1
 principalType GROUP
 privilege Select
diff --git ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
index 3631763..eea2cb3 100644
--- ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
+++ ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
@@ -97,7 +97,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_user
 principalType USER
@@ -124,7 +124,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010_tmp
+partition [2010_tmp]
 columnName key
 principalName hive_test_user
 principalType USER
diff --git ql/src/test/results/clientpositive/authorization_2.q.out ql/src/test/results/clientpositive/authorization_2.q.out
index ec122bb..5b29855 100644
--- ql/src/test/results/clientpositive/authorization_2.q.out
+++ ql/src/test/results/clientpositive/authorization_2.q.out
@@ -86,7 +86,7 @@ POSTHOOK: query: show grant user hive_test_user on table authorization_part part
 POSTHOOK: type: SHOW_GRANT
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
@@ -94,7 +94,7 @@ privilege Create
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Drop
@@ -102,7 +102,7 @@ privilege Drop
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Update
@@ -140,7 +140,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_user
 principalType USER
@@ -215,7 +215,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_user
 principalType USER
@@ -324,7 +324,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
@@ -332,7 +332,7 @@ privilege Create
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Drop
@@ -340,7 +340,7 @@ privilege Drop
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Update
@@ -386,7 +386,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
@@ -394,7 +394,7 @@ privilege Create
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Drop
@@ -402,7 +402,7 @@ privilege Drop
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Select
@@ -410,7 +410,7 @@ privilege Select
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Update
@@ -442,14 +442,14 @@ database default
 table authorization_part
 principalName hive_test_user
 principalType USER
-privilege Select
+privilege Update
 #### A masked pattern was here ####
 grantor hive_test_user
 database default
 table authorization_part
 principalName hive_test_user
 principalType USER
-privilege Update
+privilege Select
 #### A masked pattern was here ####
 grantor hive_test_user
 PREHOOK: query: select key from authorization_part where ds='2010' order by key limit 20
@@ -535,7 +535,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
@@ -543,7 +543,7 @@ privilege Create
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Drop
@@ -551,7 +551,7 @@ privilege Drop
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Select
@@ -559,7 +559,7 @@ privilege Select
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Update
@@ -619,7 +619,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Create
@@ -627,7 +627,7 @@ privilege Create
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Drop
@@ -635,7 +635,7 @@ privilege Drop
 grantor hive_test_user
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_user
 principalType USER
 privilege Update
@@ -732,7 +732,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_group1
 principalType GROUP
@@ -827,7 +827,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_group1
 principalType GROUP
@@ -997,7 +997,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_group1
 principalType GROUP
 privilege Select
@@ -1100,7 +1100,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 principalName hive_test_group1
 principalType GROUP
 privilege Select
diff --git ql/src/test/results/clientpositive/authorization_6.q.out ql/src/test/results/clientpositive/authorization_6.q.out
index 0685c1e..0d555e4 100644
--- ql/src/test/results/clientpositive/authorization_6.q.out
+++ ql/src/test/results/clientpositive/authorization_6.q.out
@@ -111,7 +111,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2011).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2011).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2010
+partition [2010]
 columnName key
 principalName hive_test_user
 principalType USER
@@ -128,7 +128,7 @@ POSTHOOK: Lineage: authorization_part PARTITION(ds=2011).key EXPRESSION [(src_au
 POSTHOOK: Lineage: authorization_part PARTITION(ds=2011).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
 database default
 table authorization_part
-partition ds=2011
+partition [2011]
 columnName key
 principalName hive_test_user
 principalType USER
diff --git ql/src/test/results/clientpositive/authorization_9.q.out ql/src/test/results/clientpositive/authorization_9.q.out
new file mode 100644
index 0000000..bad418c
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_9.q.out
@@ -0,0 +1,79 @@
+PREHOOK: query: -- SORT_BEFORE_DIFF
+
+create table dummy (key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- SORT_BEFORE_DIFF
+
+create table dummy (key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dummy
+PREHOOK: query: grant select on database default to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: grant select on database default to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+PREHOOK: query: grant select on table dummy to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@dummy
+POSTHOOK: query: grant select on table dummy to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@dummy
+PREHOOK: query: grant select (key, value) on table dummy to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@dummy
+POSTHOOK: query: grant select (key, value) on table dummy to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@dummy
+PREHOOK: query: show grant user hive_test_user on database default
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on database default
+POSTHOOK: type: SHOW_GRANT
+database default
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+PREHOOK: query: show grant user hive_test_user on table dummy
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on table dummy
+POSTHOOK: type: SHOW_GRANT
+database default
+table dummy
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+PREHOOK: query: show grant user hive_test_user on all
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on all
+POSTHOOK: type: SHOW_GRANT
+database default
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+database default
+table dummy
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+database default
+table dummy
+columnName key
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+database default
+table dummy
+columnName value
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user