diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index cbd9e59..83d5bfc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -55,7 +55,6 @@ import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.history.HiveHistory.Keys; import org.apache.hadoop.hive.ql.hooks.Entity; -import org.apache.hadoop.hive.ql.hooks.Entity.Type; import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext; import org.apache.hadoop.hive.ql.hooks.Hook; import org.apache.hadoop.hive.ql.hooks.HookContext; @@ -728,22 +727,41 @@ private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet getHivePrivileges(List privileges) { @@ -922,6 +920,8 @@ private int roleDDL(RoleDDLDesc roleDDLDesc) throws HiveException, IOException { writeToFile(writeRoleInfo(roles, testMode), roleDDLDesc.getResFile()); } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLES)) { List roleNames = db.getAllRoleNames(); + //sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(roleNames); Path resFile = new Path(roleDDLDesc.getResFile()); FileSystem fs = resFile.getFileSystem(conf); outStream = fs.create(resFile); @@ -3089,6 +3089,9 @@ static String writeGrantInfo(List privileges, boolean testM return ""; } StringBuilder builder = new StringBuilder(); + //sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(privileges); + for (HiveObjectPrivilege privilege : privileges) { HiveObjectRef resource = privilege.getHiveObject(); PrivilegeGrantInfo grantInfo = privilege.getGrantInfo(); @@ -3112,6 +3115,8 @@ static String writeRoleInfo(List roles, boolean testMode) { return ""; } StringBuilder builder = new StringBuilder(); + //sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(roles); for (Role role : roles) { appendNonNull(builder, role.getRoleName(), true); appendNonNull(builder, testMode ? -1 : role.getCreateTime() * 1000L); @@ -3129,6 +3134,8 @@ static String writeHiveRoleInfo(List roles, boolean testMode) { return ""; } StringBuilder builder = new StringBuilder(); + //sort the list to get sorted (deterministic) output (for ease of testing) + Collections.sort(roles); for (HiveRole role : roles) { appendNonNull(builder, role.getRoleName(), true); appendNonNull(builder, testMode ? 
-1 : role.getCreateTime() * 1000L); @@ -3623,12 +3630,7 @@ private void dropTable(Hive db, Table tbl, DropTableDesc dropTbl) throws HiveExc */ private boolean updateModifiedParameters(Map params, HiveConf conf) throws HiveException { String user = null; - try { - user = conf.getUser(); - } catch (IOException e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to get current user"); - } - + user = SessionState.getUserFromAuthenticator(); params.put("last_modified_by", user); params.put("last_modified_time", Long.toString(System.currentTimeMillis() / 1000)); return true; @@ -4128,11 +4130,7 @@ private int exchangeTablePartition(Hive db, } private int setGenericTableAttributes(Table tbl) throws HiveException { - try { - tbl.setOwner(conf.getUser()); - } catch (IOException e) { - throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to get current user"); - } + tbl.setOwner(SessionState.getUserFromAuthenticator()); // set create time tbl.setCreateTime((int) (System.currentTimeMillis() / 1000)); return 0; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java index 9316a82..1111c9a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java @@ -35,10 +35,16 @@ // Consider a query like: select * from V, where the view V is defined as: // select * from T // The inputs will contain V and T (parent: V) + // T will be marked as an indirect entity using isDirect flag. + // This will help in distinguishing from the case where T is a direct dependency + // For example in the case of "select * from V join T ..." T would be direct dependency + private boolean isDirect = true; // For views, the entities can be nested - by default, entities are at the top level private final Set parents = new HashSet(); + + /** * For serialization only. */ @@ -74,6 +80,11 @@ public ReadEntity(Table t, ReadEntity parent) { initParent(parent); } + public ReadEntity(Table t, ReadEntity parent, boolean isDirect) { + this(t, parent); + this.isDirect = isDirect; + } + /** * Constructor given a partition. 
* @@ -89,6 +100,12 @@ public ReadEntity(Partition p, ReadEntity parent) { initParent(parent); } + public ReadEntity(Partition p, ReadEntity parent, boolean isDirect) { + this(p, parent); + this.isDirect = isDirect; + } + + public Set getParents() { return parents; } @@ -109,4 +126,14 @@ public boolean equals(Object o) { return false; } } + + public boolean isDirect() { + return isDirect; + } + + public void setDirect(boolean isDirect) { + this.isDirect = isDirect; + } + + } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index da34d1a..0b7c128 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2449,11 +2449,7 @@ public IMetaStoreClient getMSC() throws MetaException { } private String getUserName() { - SessionState ss = SessionState.get(); - if (ss != null && ss.getAuthenticator() != null) { - return ss.getAuthenticator().getUserName(); - } - return null; + return SessionState.getUserFromAuthenticator(); } private List getGroupNames() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java index 4296b64..d2aa220 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java @@ -44,8 +44,8 @@ import org.apache.hadoop.hive.ql.exec.DependencyCollectionTask; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; -import org.apache.hadoop.hive.ql.exec.MoveTask; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; +import org.apache.hadoop.hive.ql.exec.MoveTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; @@ -102,7 +102,6 @@ import org.apache.hadoop.hive.ql.plan.TezWork; import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; -import org.apache.hadoop.mapred.InputFormat; /** * General utility common functions for the Processor to convert operator into @@ -562,14 +561,18 @@ public static void setMapWork(MapWork plan, ParseContext parseCtx, Set) Class.forName(inputFormatClass); + Class c = Class.forName(inputFormatClass); LOG.info("RCFile format- Using block level merge"); cplan = GenMapRedUtils.createRCFileMergeTask(fsInputDesc, finalName, @@ -1633,7 +1636,7 @@ public static boolean isMergeRequired(List> mvTasks, HiveConf hco // merge for a map-only job // or for a map-reduce job if (currTask.getWork() instanceof TezWork) { - return hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) || + return hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) || hconf.getBoolVar(ConfVars.HIVEMERGEMAPREDFILES); } else if (currTask.getWork() instanceof MapredWork) { ReduceWork reduceWork = ((MapredWork) currTask.getWork()).getReduceWork(); @@ -1680,9 +1683,9 @@ public static Path createMoveTask(Task currTask, boolean Context baseCtx = parseCtx.getContext(); // if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/.. // to final location /user/hive/warehouse/ will fail later, so instead pick tmp dir - // on same namespace as tbl dir. - Path tmpDir = dest.toUri().getScheme().equals("viewfs") ? - baseCtx.getExtTmpPathRelTo(dest.toUri()) : + // on same namespace as tbl dir. 
+ Path tmpDir = dest.toUri().getScheme().equals("viewfs") ? + baseCtx.getExtTmpPathRelTo(dest.toUri()) : baseCtx.getExternalTmpPath(dest.toUri()); FileSinkDesc fileSinkDesc = fsOp.getConf(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java index 20402d9..4dbe78c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java @@ -124,11 +124,7 @@ public HiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { } } - String userName = null; - if (SessionState.get() != null - && SessionState.get().getAuthenticator() != null) { - userName = SessionState.get().getAuthenticator().getUserName(); - } + String userName = SessionState.getUserFromAuthenticator(); GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc, principalDesc, userName, PrincipalType.USER, grantOption); @@ -247,11 +243,7 @@ private PrincipalType getPrincipalType(ASTNode principal) { roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText())); } - String roleOwnerName = ""; - if (SessionState.get() != null - && SessionState.get().getAuthenticator() != null) { - roleOwnerName = SessionState.get().getAuthenticator().getUserName(); - } + String roleOwnerName = SessionState.getUserFromAuthenticator(); //until change is made to use the admin option. Default to false with V2 authorization diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java index c2cb543..c60f56f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java @@ -895,12 +895,15 @@ private PlanUtils() { // is already present, make sure the parents are added. // Consider the query: // select * from (select * from V2 union all select * from V3) subq; - // where both V2 and V3 depend on V1 + // where both V2 and V3 depend on V1 (eg V2 : select * from V1, V3: select * from V1), // addInput would be called twice for V1 (one with parent V2 and the other with parent V3). // When addInput is called for the first time for V1, V1 (parent V2) is added to inputs. // When addInput is called for the second time for V1, the input V1 from inputs is picked up, // and it's parents are enhanced to include V2 and V3 - // The inputs will contain: (V2, no parent), (V3, no parent), (v1, parents(V2, v3)) + // The inputs will contain: (V2, no parent), (V3, no parent), (V1, parents(V2, v3)) + // + // If the ReadEntity is already present and another ReadEntity with same name is + // added, then the isDirect flag is updated to be the OR of values of both. public static ReadEntity addInput(Set inputs, ReadEntity newInput) { // If the input is already present, make sure the new parent is added to the input. 
if (inputs.contains(newInput)) { @@ -908,6 +911,7 @@ public static ReadEntity addInput(Set inputs, ReadEntity newInput) { if (input.equals(newInput)) { if ((newInput.getParents() != null) && (!newInput.getParents().isEmpty())) { input.getParents().addAll(newInput.getParents()); + input.setDirect(input.isDirect() || newInput.isDirect()); } return input; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java index c77eecd..7befff8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveAuthenticationProvider.java @@ -36,6 +36,11 @@ public void destroy() throws HiveException; + /** + * This function is meant to be used only for hive internal implementations of this interface. + * SessionState is not a public interface. + * @param ss SessionState that created this instance + */ public void setSessionState(SessionState ss); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java index e94f635..638967e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java @@ -68,10 +68,11 @@ public static HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) { case DATABASE: return HivePrivilegeObjectType.DATABASE; case TABLE: - return HivePrivilegeObjectType.TABLE; + return HivePrivilegeObjectType.TABLE_OR_VIEW; case LOCAL_DIR: + return HivePrivilegeObjectType.LOCAL_URI; case DFS_DIR: - return HivePrivilegeObjectType.URI; + return HivePrivilegeObjectType.DFS_URI; case PARTITION: case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions return HivePrivilegeObjectType.PARTITION; @@ -127,12 +128,12 @@ public static HiveObjectType getThriftHiveObjType(HivePrivilegeObjectType type) switch(type){ case DATABASE: return HiveObjectType.DATABASE; - case TABLE: + case TABLE_OR_VIEW: return HiveObjectType.TABLE; case PARTITION: return HiveObjectType.PARTITION; - case URI: - case VIEW: + case LOCAL_URI: + case DFS_URI: throw new HiveException("Unsupported type " + type); default: //should not happen as we have accounted for all types @@ -149,7 +150,7 @@ public static HiveObjectType getThriftHiveObjType(HivePrivilegeObjectType type) */ public static HiveObjectRef getThriftHiveObjectRef(HivePrivilegeObject privObj) throws HiveException { HiveObjectType objType = getThriftHiveObjType(privObj.getType()); - return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableviewname(), null, null); + return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableViewURI(), null, null); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java index f994f20..8ccd72b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java @@ -22,6 +22,11 @@ import org.apache.hadoop.hive.metastore.IMetaStoreClient; /** * Factory for getting current valid instance of IMetaStoreClient + * Metastore client cannot 
be cached in authorization interface as that + * can get invalidated between the calls with the logic in Hive class. + * The standard way of getting metastore client object is through Hive.get().getMSC(). + * But Hive class is not a public interface, so this factory helps in hiding Hive + * class from the authorization interface users. */ @LimitedPrivate(value = { "" }) @Evolving diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java index a774773..a3a689d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java @@ -29,19 +29,34 @@ @Override public String toString() { - return "Hive Object [type=" + type + ", dbname=" + dbname + ", table/viewname=" - + tableviewname + "]"; + String name = null; + switch (type) { + case DATABASE: + name = dbname; + break; + case TABLE_OR_VIEW: + name = (dbname == null ? "" : dbname + ".") + tableviewname; + break; + case LOCAL_URI: + case DFS_URI: + name = tableviewname; + break; + case PARTITION: + break; + } + return "Object [type=" + type + ", name=" + name + "]"; + } - public enum HivePrivilegeObjectType { DATABASE, TABLE, VIEW, PARTITION, URI}; + public enum HivePrivilegeObjectType { DATABASE, TABLE_OR_VIEW, PARTITION, LOCAL_URI, DFS_URI}; private final HivePrivilegeObjectType type; private final String dbname; private final String tableviewname; - public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableviewname){ + public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI){ this.type = type; this.dbname = dbname; - this.tableviewname = tableviewname; + this.tableviewname = tableViewURI; } public HivePrivilegeObjectType getType() { @@ -52,7 +67,7 @@ public String getDbname() { return dbname; } - public String getTableviewname() { + public String getTableViewURI() { return tableviewname; } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRole.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRole.java index a23239b..7f3d78a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRole.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveRole.java @@ -21,10 +21,12 @@ import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; import org.apache.hadoop.hive.metastore.api.Role; +import com.google.common.collect.ComparisonChain; + // same with thrift.Role @LimitedPrivate(value = { "" }) @Evolving -public class HiveRole { +public class HiveRole implements Comparable { private String roleName; private int createTime; @@ -111,4 +113,22 @@ public String getGrantor() { public void setGrantor(String grantor) { this.grantor = grantor; } + + @Override + public int compareTo(HiveRole other) { + if(other == null){ + return 1; + } + return ComparisonChain.start().compare(roleName, other.roleName) + .compare(createTime, other.createTime) + .compare(principalName, other.principalName) + .compare(principalType, other.principalType) + .compare(grantOption, other.grantOption) + .compare(grantTime, other.grantTime) + .compare(grantor, other.grantor) + .result(); + + } + + } diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java index 0c535be..fdbf3c3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole; /** * Utility class to authorize grant/revoke privileges @@ -35,7 +36,7 @@ static void authorize(List hivePrincipals, List hivePrivileges, HivePrivilegeObject hivePrivObject, boolean grantOption, IMetaStoreClient metastoreClient, - String userName) + String userName, List curRoles, boolean isAdmin) throws HiveAuthzPluginException, HiveAccessControlException { // check if this user has grant privileges for this privileges on this @@ -44,23 +45,13 @@ static void authorize(List hivePrincipals, List hi // map priv being granted to required privileges RequiredPrivileges reqPrivs = getGrantRequiredPrivileges(hivePrivileges); - // api for checking required privileges for a user - checkRequiredPrivileges(hivePrincipals, reqPrivs, hivePrivObject, metastoreClient, userName); + // check if this user has necessary privileges (reqPrivs) on this object + checkRequiredPrivileges(reqPrivs, hivePrivObject, metastoreClient, userName, curRoles, isAdmin); } - private static void checkRequiredPrivileges(List hivePrincipals, - RequiredPrivileges reqPrivs, HivePrivilegeObject hivePrivObject, - IMetaStoreClient metastoreClient, String userName) - throws HiveAuthzPluginException, HiveAccessControlException { - - for (HivePrincipal hivePrincipal : hivePrincipals) { - checkRequiredPrivileges(hivePrincipal, reqPrivs, hivePrivObject, metastoreClient, userName); - } - } - - private static void checkRequiredPrivileges(HivePrincipal hivePrincipal, + private static void checkRequiredPrivileges( RequiredPrivileges reqPrivileges, HivePrivilegeObject hivePrivObject, - IMetaStoreClient metastoreClient, String userName) + IMetaStoreClient metastoreClient, String userName, List curRoles, boolean isAdmin) throws HiveAuthzPluginException, HiveAccessControlException { // keep track of the principals on which privileges have been checked for @@ -68,7 +59,7 @@ private static void checkRequiredPrivileges(HivePrincipal hivePrincipal, // get privileges for this user and its roles on this object RequiredPrivileges availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore( - metastoreClient, userName, hivePrivObject); + metastoreClient, userName, hivePrivObject, curRoles, isAdmin); // check if required privileges is subset of available privileges Collection missingPrivs = reqPrivileges.findMissingPrivs(availPrivs); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index e448cba..fae6844 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -52,22 +52,23 @@ private static SQLPrivTypeGrant[] SEL_NOGRANT_AR = arr(SQLPrivTypeGrant.SELECT_NOGRANT); private static SQLPrivTypeGrant[] SEL_GRANT_AR = arr(SQLPrivTypeGrant.SELECT_WGRANT); private static SQLPrivTypeGrant[] ADMIN_PRIV_AR = arr(SQLPrivTypeGrant.ADMIN_PRIV); + private static SQLPrivTypeGrant[] INS_NOGRANT_AR = arr(SQLPrivTypeGrant.INSERT_NOGRANT); + private static SQLPrivTypeGrant[] DEL_NOGRANT_AR = arr(SQLPrivTypeGrant.DELETE_NOGRANT); + static { op2Priv = new HashMap(); op2Priv.put(HiveOperationType.EXPLAIN, new InOutPrivs(SEL_NOGRANT_AR, SEL_NOGRANT_AR)); //?? - op2Priv.put(HiveOperationType.LOAD, new InOutPrivs(ADMIN_PRIV_AR, null)); - // select with grant for exporting contents - op2Priv.put(HiveOperationType.EXPORT, new InOutPrivs(SEL_GRANT_AR, null)); - - op2Priv.put(HiveOperationType.IMPORT, new InOutPrivs(ADMIN_PRIV_AR, null)); op2Priv.put(HiveOperationType.CREATEDATABASE, new InOutPrivs(ADMIN_PRIV_AR, null)); - op2Priv.put(HiveOperationType.DROPDATABASE, new InOutPrivs(ADMIN_PRIV_AR, null)); + + op2Priv.put(HiveOperationType.DROPDATABASE, new InOutPrivs(OWNER_PRIV_AR, null)); //this should be database usage privilege once it is supported op2Priv.put(HiveOperationType.SWITCHDATABASE, new InOutPrivs(null, null)); + + // lock operations not controlled for now op2Priv.put(HiveOperationType.LOCKDB, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.UNLOCKDB, new InOutPrivs(null, null)); @@ -78,49 +79,83 @@ //meta store check command - require admin priv op2Priv.put(HiveOperationType.MSCK, new InOutPrivs(ADMIN_PRIV_AR, null)); + //alter table commands require table ownership - op2Priv.put(HiveOperationType.ALTERTABLE_ADDCOLS, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_REPLACECOLS, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_RENAMECOL, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_RENAMEPART, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_RENAME, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_DROPPARTS, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_ADDPARTS, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_TOUCH, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_ARCHIVE, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_UNARCHIVE, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_PARTCOLTYPE, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_CLUSTER_SORT, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, null)); - 
op2Priv.put(HiveOperationType.ALTERTABLE_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_LOCATION, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_LOCATION, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_MERGEFILES, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.ALTERPARTITION_MERGEFILES, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.ALTERTABLE_SKEWED, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.ALTERTBLPART_SKEWED_LOCATION, new InOutPrivs(null, null)); + // There should not be an output object, but just in case the table is incorrectly added + // to output instead of input, adding owner requirement on output will catch that as well + op2Priv.put(HiveOperationType.ALTERTABLE_ADDCOLS, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_REPLACECOLS, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_RENAMECOL, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_RENAMEPART, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_RENAME, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_TOUCH, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_ARCHIVE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_UNARCHIVE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_PARTCOLTYPE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERPARTITION_SERIALIZER, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERPARTITION_SERDEPROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_CLUSTER_SORT, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERPARTITION_BUCKETNUM, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERPARTITION_PROTECTMODE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERPARTITION_FILEFORMAT, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_LOCATION, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERPARTITION_LOCATION, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_MERGEFILES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); 
+ op2Priv.put(HiveOperationType.ALTERPARTITION_MERGEFILES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTABLE_SKEWED, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERTBLPART_SKEWED_LOCATION, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.TRUNCATETABLE, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + + //table ownership for create/drop/alter index + op2Priv.put(HiveOperationType.CREATEINDEX, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.DROPINDEX, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERINDEX_REBUILD, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERINDEX_PROPS, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + + // require view ownership for alter/drop view + op2Priv.put(HiveOperationType.ALTERVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.DROPVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); + op2Priv.put(HiveOperationType.DROPVIEW, new InOutPrivs(OWNER_PRIV_AR, OWNER_PRIV_AR)); op2Priv.put(HiveOperationType.ANALYZE_TABLE, new InOutPrivs(arr(SQLPrivTypeGrant.SELECT_NOGRANT, SQLPrivTypeGrant.INSERT_NOGRANT), null)); op2Priv.put(HiveOperationType.SHOWDATABASES, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.SHOWTABLES, new InOutPrivs(null, null)); + // operations that require insert/delete privileges + op2Priv.put(HiveOperationType.ALTERTABLE_DROPPARTS, new InOutPrivs(DEL_NOGRANT_AR, null)); + op2Priv.put(HiveOperationType.ALTERTABLE_ADDPARTS, new InOutPrivs(INS_NOGRANT_AR, null)); + + // select with grant for exporting contents + op2Priv.put(HiveOperationType.EXPORT, new InOutPrivs(SEL_GRANT_AR, null)); + op2Priv.put(HiveOperationType.IMPORT, new InOutPrivs(INS_NOGRANT_AR, null)); + + // operations that require select priv op2Priv.put(HiveOperationType.SHOWCOLUMNS, new InOutPrivs(SEL_NOGRANT_AR, null)); op2Priv.put(HiveOperationType.SHOW_TABLESTATUS, new InOutPrivs(SEL_NOGRANT_AR, null)); op2Priv.put(HiveOperationType.SHOW_TBLPROPERTIES, new InOutPrivs(SEL_NOGRANT_AR, null)); + op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, new InOutPrivs(SEL_NOGRANT_AR, OWNER_PRIV_AR)); - //show create table is more sensitive information, includes table properties etc + // QUERY,LOAD op can contain an insert & overwrite, so require insert+delete privileges on output + op2Priv.put(HiveOperationType.QUERY, new InOutPrivs(SEL_NOGRANT_AR, + arr(SQLPrivTypeGrant.INSERT_NOGRANT, SQLPrivTypeGrant.DELETE_NOGRANT))); + op2Priv.put(HiveOperationType.LOAD, new InOutPrivs(SEL_NOGRANT_AR, + arr(SQLPrivTypeGrant.INSERT_NOGRANT, SQLPrivTypeGrant.DELETE_NOGRANT))); + + // show create table is more sensitive information, includes table properties etc // for now require select WITH GRANT op2Priv.put(HiveOperationType.SHOW_CREATETABLE, new InOutPrivs(SEL_GRANT_AR, null)); + // for now allow only create-view with 'select with grant' + // the owner will also have select with grant privileges on new view + op2Priv.put(HiveOperationType.CREATEVIEW, new InOutPrivs(SEL_GRANT_AR, null)); + op2Priv.put(HiveOperationType.SHOWFUNCTIONS, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.SHOWINDEXES, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.SHOWPARTITIONS, new InOutPrivs(null, null)); @@ -129,21 +164,6 @@ 
op2Priv.put(HiveOperationType.DROPFUNCTION, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.CREATEMACRO, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.DROPMACRO, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.CREATEVIEW, new InOutPrivs(SEL_GRANT_AR, null)); - - // require view ownership - op2Priv.put(HiveOperationType.DROPVIEW, new InOutPrivs(OWNER_PRIV_AR, null)); - - //table ownership for create/drop/alter index - op2Priv.put(HiveOperationType.CREATEINDEX, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.DROPINDEX, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERINDEX_REBUILD, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERINDEX_PROPS, new InOutPrivs(OWNER_PRIV_AR, null)); - - // require view ownership for alter/drop view - op2Priv.put(HiveOperationType.ALTERVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.DROPVIEW_PROPERTIES, new InOutPrivs(OWNER_PRIV_AR, null)); - op2Priv.put(HiveOperationType.ALTERVIEW_RENAME, new InOutPrivs(OWNER_PRIV_AR, null)); op2Priv.put(HiveOperationType.LOCKTABLE, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.UNLOCKTABLE, new InOutPrivs(null, null)); @@ -151,13 +171,7 @@ // require db ownership op2Priv.put(HiveOperationType.CREATETABLE, new InOutPrivs(OWNER_PRIV_AR, null)); - // require table ownership - op2Priv.put(HiveOperationType.TRUNCATETABLE, new InOutPrivs(OWNER_PRIV_AR, null)); - - op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, new InOutPrivs(OWNER_PRIV_AR, SEL_NOGRANT_AR)); - op2Priv.put(HiveOperationType.QUERY, new InOutPrivs(SEL_NOGRANT_AR, null)); - - op2Priv.put(HiveOperationType.ALTERDATABASE, new InOutPrivs(ADMIN_PRIV_AR, null)); + op2Priv.put(HiveOperationType.ALTERDATABASE, new InOutPrivs(OWNER_PRIV_AR, null)); op2Priv.put(HiveOperationType.DESCDATABASE, new InOutPrivs(null, null)); // The following actions are authorized through SQLStdHiveAccessController, diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java index ee06335..10a582b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/RequiredPrivileges.java @@ -28,6 +28,8 @@ /** * Captures privilege sets, and can be used to compare required and available privileges * to find missing privileges (if any). + * ADMIN_PRIV is considered a special privilege, if the user has that, then no privilege is + * missing. */ public class RequiredPrivileges { @@ -56,6 +58,12 @@ public void addPrivilege(String priv, boolean withGrant) throws HiveAuthzPluginE */ public Collection findMissingPrivs(RequiredPrivileges availPrivs) { MissingPrivilegeCapturer missingPrivCapturer = new MissingPrivilegeCapturer(); + + if(availPrivs.privilegeGrantSet.contains(SQLPrivTypeGrant.ADMIN_PRIV)){ + //you are an admin! You have all privileges, no missing privileges + return missingPrivCapturer.getMissingPrivileges(); + } + // check the mere mortals! 
for (SQLPrivTypeGrant requiredPriv : privilegeGrantSet) { if (!availPrivs.privilegeGrantSet.contains(requiredPriv)) { missingPrivCapturer.addMissingPrivilege(requiredPriv); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java index 942b11a..4a9149f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java @@ -21,6 +21,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; @@ -45,6 +46,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRole; import org.apache.thrift.TException; public class SQLAuthorizationUtils { @@ -121,7 +123,7 @@ static HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType) case DATABASE: return HivePrivilegeObjectType.DATABASE; case TABLE: - return HivePrivilegeObjectType.TABLE; + return HivePrivilegeObjectType.TABLE_OR_VIEW; case COLUMN: case GLOBAL: case PARTITION: @@ -151,16 +153,22 @@ public static void validatePrivileges(List hivePrivileges) throws /** * Get the privileges this user(userName argument) has on the object - * (hivePrivObject argument) + * (hivePrivObject argument) If isAdmin is true, adds an admin privilege as + * well. 
* * @param metastoreClient * @param userName * @param hivePrivObject + * @param curRoles + * current active roles for user + * @param isAdmin + * if user can run as admin user * @return * @throws HiveAuthzPluginException */ static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreClient, - String userName, HivePrivilegeObject hivePrivObject) throws HiveAuthzPluginException { + String userName, HivePrivilegeObject hivePrivObject, List curRoles, boolean isAdmin) + throws HiveAuthzPluginException { // get privileges for this user and its role on this object PrincipalPrivilegeSet thrifPrivs = null; @@ -175,6 +183,8 @@ static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreC throwGetPrivErr(e, hivePrivObject, userName); } + filterPrivsByCurrentRoles(thrifPrivs, curRoles); + // convert to RequiredPrivileges RequiredPrivileges privs = getRequiredPrivsFromThrift(thrifPrivs); @@ -182,11 +192,42 @@ static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreC if (isOwner(metastoreClient, userName, hivePrivObject)) { privs.addPrivilege(SQLPrivTypeGrant.OWNER_PRIV); } + if (isAdmin) { + privs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV); + } return privs; } /** + * Remove any role privileges that don't belong to the roles in curRoles + * @param thriftPrivs + * @param curRoles + * @return + */ + private static void filterPrivsByCurrentRoles(PrincipalPrivilegeSet thriftPrivs, + List curRoles) { + // check if there are privileges to be filtered + if(thriftPrivs == null || thriftPrivs.getRolePrivileges() == null + || thriftPrivs.getRolePrivilegesSize() == 0 + ){ + // no privileges to filter + return; + } + + // add the privs for roles in curRoles to new role-to-priv map + Map> filteredRolePrivs = new HashMap>(); + for(HiveRole role : curRoles){ + String roleName = role.getRoleName(); + List privs = thriftPrivs.getRolePrivileges().get(roleName); + if(privs != null){ + filteredRolePrivs.put(roleName, privs); + } + } + thriftPrivs.setRolePrivileges(filteredRolePrivs); + } + + /** * Check if user is owner of the given object * * @param metastoreClient @@ -200,10 +241,10 @@ static RequiredPrivileges getPrivilegesFromMetaStore(IMetaStoreClient metastoreC private static boolean isOwner(IMetaStoreClient metastoreClient, String userName, HivePrivilegeObject hivePrivObject) throws HiveAuthzPluginException { //for now, check only table - if(hivePrivObject.getType() == HivePrivilegeObjectType.TABLE){ + if(hivePrivObject.getType() == HivePrivilegeObjectType.TABLE_OR_VIEW){ Table thriftTableObj = null; try { - thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(), hivePrivObject.getTableviewname()); + thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(), hivePrivObject.getTableViewURI()); } catch (MetaException e) { throwGetTableErr(e, hivePrivObject); } catch (NoSuchObjectException e) { @@ -224,7 +265,8 @@ private static void throwGetTableErr(Exception e, HivePrivilegeObject hivePrivOb private static void throwGetPrivErr(Exception e, HivePrivilegeObject hivePrivObject, String userName) throws HiveAuthzPluginException { - String msg = "Error getting privileges on " + hivePrivObject + " for " + userName; + String msg = "Error getting privileges on " + hivePrivObject + " for " + userName + ": " + + e.getMessage(); throw new HiveAuthzPluginException(msg, e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java index 95520f0..8de73d9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLPrivTypeGrant.java @@ -29,8 +29,8 @@ UPDATE_WGRANT(SQLPrivilegeType.UPDATE, true), DELETE_NOGRANT(SQLPrivilegeType.DELETE, false), DELETE_WGRANT(SQLPrivilegeType.DELETE, true), - OWNER_PRIV("Object ownership"), - ADMIN_PRIV("Admin privilege"); // This one can be used to deny permission for performing the operation + OWNER_PRIV("OBJECT OWNERSHIP"), + ADMIN_PRIV("ADMIN PRIVILEGE"); // This one can be used to deny permission for performing the operation private final SQLPrivilegeType privType; private final boolean withGrant; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java index dd5f577..c1afaee 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java @@ -42,7 +42,6 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; @@ -61,6 +60,8 @@ private String currentUserName; private List currentRoles; private HiveRole adminRole; + private final String ADMIN_ONLY_MSG = "User has to belong to ADMIN role and " + + "have it as current role, for this action."; SQLStdHiveAccessController(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf, HiveAuthenticationProvider authenticator) throws HiveAuthzPluginException { @@ -102,7 +103,7 @@ private void initUserRoles() throws HiveAuthzPluginException { return currentRoles; } catch (Exception e) { throw new HiveAuthzPluginException("Failed to retrieve roles for "+ - currentUserName, e); + currentUserName + ": " + e.getMessage(), e); } } @@ -112,15 +113,12 @@ public void grantPrivileges(List hivePrincipals, HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException, HiveAccessControlException { - // expand ALL privileges, if any - hivePrivileges = expandAllPrivileges(hivePrivileges); - - SQLAuthorizationUtils.validatePrivileges(hivePrivileges); + hivePrivileges = expandAndValidatePrivileges(hivePrivileges); IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient(); // authorize the grant GrantPrivAuthUtils.authorize(hivePrincipals, hivePrivileges, hivePrivObject, grantOption, - metastoreClient, authenticator.getUserName()); + metastoreClient, authenticator.getUserName(), getCurrentRoles(), isUserAdmin()); // grant PrivilegeBag privBag = getThriftPrivilegesBag(hivePrincipals, hivePrivileges, hivePrivObject, @@ -128,10 +126,18 @@ public void grantPrivileges(List hivePrincipals, try { 
metastoreClient.grant_privileges(privBag); } catch (Exception e) { - throw new HiveAuthzPluginException("Error granting privileges", e); + throw new HiveAuthzPluginException("Error granting privileges: " + e.getMessage(), e); } } + private List expandAndValidatePrivileges(List hivePrivileges) + throws HiveAuthzPluginException { + // expand ALL privileges, if any + hivePrivileges = expandAllPrivileges(hivePrivileges); + SQLAuthorizationUtils.validatePrivileges(hivePrivileges); + return hivePrivileges; + } + private List expandAllPrivileges(List hivePrivileges) { Set hivePrivSet = new HashSet(); for (HivePrivilege hivePrivilege : hivePrivileges) { @@ -196,7 +202,8 @@ public void revokePrivileges(List hivePrincipals, List hivePrivileges, HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException, HiveAccessControlException { - SQLAuthorizationUtils.validatePrivileges(hivePrivileges); + + hivePrivileges = expandAndValidatePrivileges(hivePrivileges); IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient(); // authorize the revoke, and get the set of privileges to be revoked @@ -221,9 +228,9 @@ public void revokePrivileges(List hivePrincipals, public void createRole(String roleName, HivePrincipal adminGrantor) throws HiveAuthzPluginException, HiveAccessControlException { // only user belonging to admin role can create new roles. - if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) { + if (!isUserAdmin()) { throw new HiveAccessControlException("Current user : " + currentUserName+ " is not" - + " allowed to add roles. Only users belonging to admin role can add new roles."); + + " allowed to add roles. " + ADMIN_ONLY_MSG); } try { String grantorName = adminGrantor == null ? null : adminGrantor.getName(); @@ -237,9 +244,9 @@ public void createRole(String roleName, HivePrincipal adminGrantor) @Override public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException { // only user belonging to admin role can drop existing role - if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) { + if (!isUserAdmin()) { throw new HiveAccessControlException("Current user : " + currentUserName+ " is not" - + " allowed to drop role. Only users belonging to admin role can drop roles."); + + " allowed to drop role. " + ADMIN_ONLY_MSG); } try { metastoreClientFactory.getHiveMetastoreClient().drop_role(roleName); @@ -260,7 +267,7 @@ public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAcces return hiveRoles; } catch (Exception e) { throw new HiveAuthzPluginException("Error listing roles for user " - + hivePrincipal.getName(), e); + + hivePrincipal.getName() + ": " + e.getMessage(), e); } } @@ -268,9 +275,9 @@ public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAcces public void grantRole(List hivePrincipals, List roleNames, boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException, HiveAccessControlException { - if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) { + if (!isUserAdmin()) { throw new HiveAccessControlException("Current user : " + currentUserName+ " is not" - + " allowed to grant role. Currently only users belonging to admin role can grant roles."); + + " allowed to grant role. 
Currently " + ADMIN_ONLY_MSG); } for (HivePrincipal hivePrincipal : hivePrincipals) { for (String roleName : roleNames) { @@ -300,9 +307,9 @@ public void revokeRole(List hivePrincipals, List roleName throw new HiveAuthzPluginException("Revoking only the admin privileges on " + "role is not currently supported"); } - if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) { + if (!isUserAdmin()) { throw new HiveAccessControlException("Current user : " + currentUserName+ " is not" - + " allowed to revoke role. Currently only users belonging to admin role can revoke roles."); + + " allowed to revoke role. " + ADMIN_ONLY_MSG); } for (HivePrincipal hivePrincipal : hivePrincipals) { for (String roleName : roleNames) { @@ -312,7 +319,7 @@ public void revokeRole(List hivePrincipals, List roleName AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType())); } catch (Exception e) { String msg = "Error revoking roles for " + hivePrincipal.getName() + " to role " - + roleName; + + roleName + ": " + e.getMessage(); throw new HiveAuthzPluginException(msg, e); } } @@ -322,9 +329,9 @@ public void revokeRole(List hivePrincipals, List roleName @Override public List getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException { // only user belonging to admin role can list role - if (!this.isUserAdmin(new HivePrincipal(currentUserName,HivePrincipalType.USER))) { + if (!isUserAdmin()) { throw new HiveAccessControlException("Current user : " + currentUserName+ " is not" - + " allowed to list roles. Only users belonging to admin role can list roles."); + + " allowed to list roles. " + ADMIN_ONLY_MSG); } try { return metastoreClientFactory.getHiveMetastoreClient().listRoleNames(); @@ -372,7 +379,7 @@ public void revokeRole(List hivePrincipals, List roleName return resPrivInfos; } catch (Exception e) { - throw new HiveAuthzPluginException("Error showing privileges", e); + throw new HiveAuthzPluginException("Error showing privileges: "+ e.getMessage(), e); } } @@ -383,7 +390,7 @@ private HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType) case DATABASE: return HivePrivilegeObjectType.DATABASE; case TABLE: - return HivePrivilegeObjectType.TABLE; + return HivePrivilegeObjectType.TABLE_OR_VIEW; case COLUMN: case GLOBAL: case PARTITION: @@ -429,11 +436,10 @@ public void setCurrentRole(String roleName) throws HiveAccessControlException, } /** - * @param principal * @return true only if current role of user is Admin * @throws HiveAuthzPluginException */ - private boolean isUserAdmin(HivePrincipal principal) throws HiveAuthzPluginException { + boolean isUserAdmin() throws HiveAuthzPluginException { List roles; try { roles = getCurrentRoles(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java index ac50c00..40461f7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java @@ -20,34 +20,51 @@ import java.util.Collection; import java.util.List; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStore; import org.apache.hadoop.hive.metastore.IMetaStoreClient; 
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator { private final HiveMetastoreClientFactory metastoreClientFactory; private final HiveConf conf; private final HiveAuthenticationProvider authenticator; + private final SQLStdHiveAccessController privController; + public static final Log LOG = LogFactory.getLog(HiveMetaStore.class); public SQLStdHiveAuthorizationValidator(HiveMetastoreClientFactory metastoreClientFactory, - HiveConf conf, HiveAuthenticationProvider authenticator) { + HiveConf conf, HiveAuthenticationProvider authenticator, + SQLStdHiveAccessController privController) { + this.metastoreClientFactory = metastoreClientFactory; this.conf = conf; this.authenticator = authenticator; + this.privController = privController; } @Override public void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, List outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException { + + if(LOG.isDebugEnabled()){ + String msg = "Checking privileges for operation " + hiveOpType + " by user " + + authenticator.getUserName() + " on " + " input objects " + inputHObjs + + " and output objects " + outputHObjs; + LOG.debug(msg); + } + String userName = authenticator.getUserName(); IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient(); @@ -69,13 +86,23 @@ private void checkPrivileges(SQLPrivTypeGrant[] reqPrivs, // check if this user has these privileges on the objects for (HivePrivilegeObject hObj : hObjs) { - // get the privileges that this user has on the object - RequiredPrivileges availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore( - metastoreClient, userName, hObj); - Collection missingPriv = requiredInpPrivs - .findMissingPrivs(availPrivs); - SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new HivePrincipal(userName, - HivePrincipalType.USER), hObj); + if (hObj.getType() == HivePrivilegeObjectType.LOCAL_URI) { + + } else if (hObj.getType() == HivePrivilegeObjectType.DFS_URI) { + + } else if (hObj.getType() == HivePrivilegeObjectType.PARTITION) { + // sql std authorization is managing privileges at the table/view levels only + // ignore partitions + } else { + // get the privileges that this user has on the object + RequiredPrivileges availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore( + metastoreClient, userName, hObj, privController.getCurrentRoles(), + privController.isUserAdmin()); + Collection missingPriv = requiredInpPrivs + .findMissingPrivs(availPrivs); + SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new 
HivePrincipal(userName, + HivePrincipalType.USER), hObj); + } } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java index 5fc5c0f..c2d50b1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizerFactory.java @@ -31,10 +31,12 @@ @Override public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf, HiveAuthenticationProvider authenticator) throws HiveAuthzPluginException { - + SQLStdHiveAccessController privilegeManager = + new SQLStdHiveAccessController(metastoreClientFactory, conf, authenticator); return new HiveAuthorizerImpl( - new SQLStdHiveAccessController(metastoreClientFactory, conf, authenticator), - new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator) + privilegeManager, + new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator, + privilegeManager) ); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java b/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java index da398b1..3ade2c7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/CreateTableAutomaticGrant.java @@ -35,6 +35,10 @@ private Map> groupGrants; private Map> roleGrants; + // the owner can change, also owner might appear in user grants as well + // so keep owner privileges separate from userGrants + private List ownerGrant; + public static CreateTableAutomaticGrant create(HiveConf conf) throws HiveException { CreateTableAutomaticGrant grants = new CreateTableAutomaticGrant(); @@ -44,20 +48,10 @@ public static CreateTableAutomaticGrant create(HiveConf conf) HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_GROUP_GRANTS)); grants.roleGrants = getGrantMap(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_ROLE_GRANTS)); - - String grantor = null; - if (SessionState.get() != null - && SessionState.get().getAuthenticator() != null) { - grantor = SessionState.get().getAuthenticator().getUserName(); - List ownerGrant = getGrantorInfoList(HiveConf.getVar(conf, - HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS)); - if(ownerGrant != null) { - if (grants.userGrants == null) { - grants.userGrants = new HashMap>(); - } - grants.userGrants.put(grantor, ownerGrant); - } - } + + grants.ownerGrant = getGrantorInfoList(HiveConf.getVar(conf, + HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS)); + return grants; } @@ -94,13 +88,11 @@ public static CreateTableAutomaticGrant create(HiveConf conf) if (privList == null || privList.trim().equals("")) { return null; } - checkPrivilege(privList); + validatePrivilege(privList); String[] grantArray = privList.split(","); List grantInfoList = new ArrayList(); - String grantor = null; - if (SessionState.get().getAuthenticator() != null) { - grantor = SessionState.get().getAuthenticator().getUserName(); - } + String grantor = SessionState.getUserFromAuthenticator(); + for (String grant : grantArray) { grantInfoList.add(new PrivilegeGrantInfo(grant, -1, grantor, PrincipalType.USER, true)); @@ -108,7 +100,7 @@ public static CreateTableAutomaticGrant create(HiveConf conf) return 
grantInfoList; } - private static void checkPrivilege(String ownerGrantsInConfig) + private static void validatePrivilege(String ownerGrantsInConfig) throws HiveException { String[] ownerGrantArray = ownerGrantsInConfig.split(","); // verify the config @@ -121,7 +113,15 @@ private static void checkPrivilege(String ownerGrantsInConfig) } public Map> getUserGrants() { - return userGrants; + Map> curUserGrants = new HashMap>(); + String owner = SessionState.getUserFromAuthenticator(); + if (owner != null && ownerGrant != null) { + curUserGrants.put(owner, ownerGrant); + } + if (userGrants != null) { + curUserGrants.putAll(userGrants); + } + return curUserGrants; } public Map> getGroupGrants() { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 845ff77..ed191bf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -558,6 +558,19 @@ public static String validateFile(Set curFiles, String newFile) { } } + /** + * + * @return username from current SessionState authenticator. username will be + * null if there is no current SessionState object or authenticator is + * null. + */ + public static String getUserFromAuthenticator() { + if (SessionState.get() != null && SessionState.get().getAuthenticator() != null) { + return SessionState.get().getAuthenticator().getUserName(); + } + return null; + } + public static boolean registerJar(String newJar) { LogHelper console = getConsole(); try { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java new file mode 100644 index 0000000..2d27cec --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java @@ -0,0 +1,196 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.plan; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.Serializable; +import java.util.List; +import java.util.Set; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.hooks.ReadEntity; +import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook; +import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext; +import org.apache.hadoop.hive.ql.parse.ParseException; +import org.apache.hadoop.hive.ql.parse.SemanticException; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Test if ReadEntity isDirect flag is set correctly to indicate if + * the entity represents a direct or indirect dependency. See documentation + * of flag in ReadEntity. + */ +public class TestReadEntityDirect { + + @BeforeClass + public static void onetimeSetup() throws CommandNeedRetryException { + Driver driver = createDriver(); + int ret = driver.run("create table t1(i int)").getResponseCode(); + assertEquals("Checking command success", 0, ret); + ret = driver.run("create view v1 as select * from t1").getResponseCode(); + assertEquals("Checking command success", 0, ret); + } + + @Before + public void setup() { + CheckInputReadEntityDirect.readEntities = null; + } + + /** + * No views in the query so it should be a direct entity + * + * @throws ParseException + */ + @Test + public void testSelectEntityDirect() throws ParseException { + Driver driver = createDriver(); + int ret = driver.compile("select * from t1"); + assertEquals("Checking command success", 0, ret); + assertEquals(1, CheckInputReadEntityDirect.readEntities.size()); + assertTrue("isDirect", CheckInputReadEntityDirect.readEntities.iterator().next().isDirect()); + } + + /** + * Underlying table of view should be marked as indirect + * + * @throws ParseException + */ + @Test + public void testSelectEntityInDirect() throws ParseException { + Driver driver = createDriver(); + int ret = driver.compile("select * from v1"); + assertEquals("Checking command success", 0, ret); + assertEquals(2, CheckInputReadEntityDirect.readEntities.size()); + for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) { + if (readEntity.getName().equals("default@t1")) { + assertFalse("not direct", readEntity.isDirect()); + } else if (readEntity.getName().equals("default@v1")) { + assertTrue("direct", readEntity.isDirect()); + } else { + fail("unexpected entity name " + readEntity.getName()); + } + } + } + + /** + * Underlying table of view should be marked as direct, as it is also accessed + * directly in the join query + * + * @throws ParseException + */ + @Test + public void testSelectEntityViewDirectJoin() throws ParseException { + Driver driver = createDriver(); + int ret = driver.compile("select * from v1 join t1 on (v1.i = t1.i)"); + assertEquals("Checking command success", 0, ret); + assertEquals(2, CheckInputReadEntityDirect.readEntities.size()); + for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) { + if (readEntity.getName().equals("default@t1")) { + assertTrue("direct", readEntity.isDirect()); + } else if (readEntity.getName().equals("default@v1")) { + assertTrue("direct", 
readEntity.isDirect()); + } else { + fail("unexpected entity name " + readEntity.getName()); + } + } + } + + /** + * Underlying table of view should be marked as direct, as it is also accessed + * directly in the union-all query + * + * @throws ParseException + */ + @Test + public void testSelectEntityViewDirectUnion() throws ParseException { + Driver driver = createDriver(); + int ret = driver.compile("select * from ( select * from v1 union all select * from t1) uv1t1"); + assertEquals("Checking command success", 0, ret); + assertEquals(2, CheckInputReadEntityDirect.readEntities.size()); + for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) { + if (readEntity.getName().equals("default@t1")) { + assertTrue("direct", readEntity.isDirect()); + } else if (readEntity.getName().equals("default@v1")) { + assertTrue("direct", readEntity.isDirect()); + } else { + fail("unexpected entity name " + readEntity.getName()); + } + } + } + + /** + * Underlying table of view should be marked as indirect. Query with join of views and aliases + * + * @throws ParseException + */ + @Test + public void testSelectEntityInDirectJoinAlias() throws ParseException { + Driver driver = createDriver(); + int ret = driver.compile("select * from v1 as a join v1 as b on (a.i = b.i)"); + assertEquals("Checking command success", 0, ret); + assertEquals(2, CheckInputReadEntityDirect.readEntities.size()); + for (ReadEntity readEntity : CheckInputReadEntityDirect.readEntities) { + if (readEntity.getName().equals("default@t1")) { + assertFalse("not direct", readEntity.isDirect()); + } else if (readEntity.getName().equals("default@v1")) { + assertTrue("direct", readEntity.isDirect()); + } else { + fail("unexpected entity name " + readEntity.getName()); + } + } + } + + /** + * Create driver with the test hook set in config + */ + private static Driver createDriver() { + HiveConf conf = new HiveConf(Driver.class); + conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, + CheckInputReadEntityDirect.class.getName()); + HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + SessionState.start(conf); + Driver driver = new Driver(conf); + driver.init(); + return driver; + } + + /** + * Hook used in the test to capture the set of ReadEntities + */ + public static class CheckInputReadEntityDirect extends AbstractSemanticAnalyzerHook { + public static Set readEntities; + + @Override + public void postAnalyze(HiveSemanticAnalyzerHookContext context, + List> rootTasks) throws SemanticException { + readEntities = context.getInputs(); + } + + } + +} diff --git a/ql/src/test/queries/clientnegative/authorization_addpartition.q b/ql/src/test/queries/clientnegative/authorization_addpartition.q new file mode 100644 index 0000000..64d8a3d --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_addpartition.q @@ -0,0 +1,8 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check add partition without insert privilege +create table tpart(i int, j int) partitioned by (k string); +set user.name=user1; +alter table tpart add partition (k = 'abc') location 'file:${system:test.tmp.dir}/temp' ; diff --git a/ql/src/test/queries/clientnegative/authorization_createview.q b/ql/src/test/queries/clientnegative/authorization_createview.q new file mode 100644 
index 0000000..f7ee26f --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_createview.q @@ -0,0 +1,10 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check create view without select privileges +create table t1(i int); +set user.name=user1; +create view v1 as select * from t1; + + diff --git a/ql/src/test/queries/clientnegative/authorization_ctas.q b/ql/src/test/queries/clientnegative/authorization_ctas.q new file mode 100644 index 0000000..8507c42 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_ctas.q @@ -0,0 +1,10 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check query without select privilege fails +create table t1(i int); + +set user.name=user1; +create table t2 as select * from t1; + diff --git a/ql/src/test/queries/clientnegative/authorization_droppartition.q b/ql/src/test/queries/clientnegative/authorization_droppartition.q new file mode 100644 index 0000000..45ed99b --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_droppartition.q @@ -0,0 +1,9 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check drop partition without delete privilege +create table tpart(i int, j int) partitioned by (k string); +alter table tpart add partition (k = 'abc') location 'file:${system:test.tmp.dir}/temp' ; +set user.name=user1; +alter table tpart drop partition (k = 'abc'); diff --git a/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q b/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q new file mode 100644 index 0000000..14fd307 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_insert_noinspriv.q @@ -0,0 +1,11 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check insert without select priv +create table t1(i int); + +set user.name=user1; +create table user2tab(i int); +insert into table t1 select * from user2tab; + diff --git a/ql/src/test/queries/clientnegative/authorization_insert_noselectpriv.q b/ql/src/test/queries/clientnegative/authorization_insert_noselectpriv.q new file mode 100644 index 0000000..ee6dd20 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_insert_noselectpriv.q @@ -0,0 +1,11 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check insert without select priv +create table t1(i int); + +set user.name=user1; 
+create table t2(i int); +insert into table t2 select * from t1; + diff --git a/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_rename.q b/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_rename.q new file mode 100644 index 0000000..3f7b7df --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_rename.q @@ -0,0 +1,10 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +-- check if alter table fails as different user +create table t1(i int); + +set user.name=user2; +alter table t1 rename to tnew1; diff --git a/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_serdeprop.q b/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_serdeprop.q new file mode 100644 index 0000000..a06ac18 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_not_owner_alter_tab_serdeprop.q @@ -0,0 +1,10 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +-- check if alter table fails as different user +create table t1(i int); + +set user.name=user2; +ALTER TABLE t1 SET SERDEPROPERTIES ('field.delim' = ','); diff --git a/ql/src/test/queries/clientnegative/authorization_not_owner_drop_tab.q b/ql/src/test/queries/clientnegative/authorization_not_owner_drop_tab.q new file mode 100644 index 0000000..03eca67 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_not_owner_drop_tab.q @@ -0,0 +1,11 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +-- check if create table fails as different user +create table t1(i int); + +set user.name=user2; +drop table t1; + diff --git a/ql/src/test/queries/clientnegative/authorization_not_owner_drop_view.q b/ql/src/test/queries/clientnegative/authorization_not_owner_drop_view.q new file mode 100644 index 0000000..d92cc55 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_not_owner_drop_view.q @@ -0,0 +1,11 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +-- check if create table fails as different user +create table t1(i int); +create view vt1 as select * from t1; + +set user.name=user2; +drop view vt1; diff --git a/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q b/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q new file mode 100644 index 0000000..b8d5189 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_priv_current_role_neg.q @@ -0,0 +1,29 @@ +set hive.users.in.admin.role=hive_admin_user; +set 
hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set user.name=hive_admin_user; +set role ADMIN; + +-- the test verifies that authorization is happening with privileges of the current roles + +-- grant privileges with grant option for table to role2 +create role role2; +grant role role2 to user user2; +create table tpriv_current_role(i int); +grant all on table tpriv_current_role to role role2 with grant option; + +set user.name=user2; +-- switch to user2 + +-- by default all roles should be in current roles, and grant to new user should work +show current roles; +grant all on table tpriv_current_role to user user3; + +set role role2; +-- switch to role2, grant should work +grant all on table tpriv_current_role to user user4; +show grant user user4 on table tpriv_current_role; + +set role PUBLIC; +-- set role to public, should fail as role2 is not one of the current roles +grant all on table tpriv_current_role to user user5; diff --git a/ql/src/test/queries/clientnegative/authorization_select.q b/ql/src/test/queries/clientnegative/authorization_select.q new file mode 100644 index 0000000..721de69 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_select.q @@ -0,0 +1,9 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check query without select privilege fails +create table t1(i int); + +set user.name=user1; +select * from t1; diff --git a/ql/src/test/queries/clientnegative/authorization_select_view.q b/ql/src/test/queries/clientnegative/authorization_select_view.q new file mode 100644 index 0000000..ac526e3 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_select_view.q @@ -0,0 +1,11 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check create view without select privileges +create table t1(i int); +create view v1 as select * from t1; +set user.name=user1; +select * from v1; + + diff --git a/ql/src/test/queries/clientnegative/authorization_set_role_neg1.q b/ql/src/test/queries/clientnegative/authorization_set_role_neg1.q new file mode 100644 index 0000000..482b8ea --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_set_role_neg1.q @@ -0,0 +1,6 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; + +-- an error should be thrown if 'set role ' is done for role that does not exist + +set role nosuchroleexists; + diff --git a/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q b/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q new file mode 100644 index 0000000..77fc8f4 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_set_role_neg2.q @@ -0,0 +1,16 @@ +set hive.users.in.admin.role=hive_admin_user; +set 
hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set user.name=hive_admin_user; +set role ADMIN; + +-- an error should be thrown if 'set role ' is done for role that does not exist + +create role rset_role_neg; +grant role rset_role_neg to user user2; + +set user.name=user2; +set role rset_role_neg; +set role public; +set role nosuchroleexists;; + diff --git a/ql/src/test/queries/clientnegative/authorization_truncate.q b/ql/src/test/queries/clientnegative/authorization_truncate.q new file mode 100644 index 0000000..e7ba559 --- /dev/null +++ b/ql/src/test/queries/clientnegative/authorization_truncate.q @@ -0,0 +1,9 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; + +-- check add partition without insert privilege +create table t1(i int, j int); +set user.name=user1; +truncate table t1; + diff --git a/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q b/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q new file mode 100644 index 0000000..ba7bd98 --- /dev/null +++ b/ql/src/test/queries/clientpositive/authorization_admin_almighty1.q @@ -0,0 +1,17 @@ +set hive.users.in.admin.role=hive_admin_user; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set user.name=hive_test_user; + +-- actions from admin should work as if admin has all privileges + +create table t1(i int); +set user.name=hive_admin_user; + +show current roles; +set role ADMIN; +show current roles; +select * from t1; +grant all on table t1 to user user1; +show grant user user1 on table t1; +drop table t1; diff --git a/ql/src/test/queries/clientpositive/authorization_owner_actions.q b/ql/src/test/queries/clientpositive/authorization_owner_actions.q new file mode 100644 index 0000000..9f322af --- /dev/null +++ b/ql/src/test/queries/clientpositive/authorization_owner_actions.q @@ -0,0 +1,16 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +-- actions that require user to be table owner +create table t1(i int); + +ALTER TABLE t1 SET SERDEPROPERTIES ('field.delim' = ','); +drop table t1; + +create table t1(i int); +create view vt1 as select * from t1; + +drop view vt1; +alter table t1 rename to tnew1; diff --git a/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q b/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q index bf5db29..f91e100 100644 --- a/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q +++ b/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q @@ -4,7 +4,7 @@ set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.Sessi set user.name=user1; -- current user has been set (comment line before the set cmd is 
resulting in parse error!!) -CREATE TABLE table_priv_rev(i int); +CREATE TABLE table_priv_rev(i int); -- grant insert privilege to user2 GRANT INSERT ON table_priv_rev TO USER user2; @@ -48,3 +48,10 @@ SHOW GRANT USER user2 ON TABLE table_priv_rev; REVOKE SELECT ON TABLE table_priv_rev FROM USER user2; SHOW GRANT USER user2 ON TABLE table_priv_rev; + +-- grant all followed by revoke all +GRANT ALL ON table_priv_rev TO USER user2; +SHOW GRANT USER user2 ON TABLE table_priv_rev; + +REVOKE ALL ON TABLE table_priv_rev FROM USER user2; +SHOW GRANT USER user2 ON TABLE table_priv_rev; diff --git a/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q b/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q new file mode 100644 index 0000000..f89e624 --- /dev/null +++ b/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q @@ -0,0 +1,35 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=user1; + +create table t1(i int, j int, k int); + +-- protecting certain columns +create view vt1 as select i,k from t1; + +-- protecting certain rows +create view vt2 as select * from t1 where i > 1; + +--view grant to user + +grant select on view vt1 to user user2; +grant insert on view vt1 to user user3; + +show grant user user2 on table vt1; +show grant user user3 on table vt1; + +set user.name=user2; +select * from vt1; + +set user.name=user1; + +grant all on view vt2 to user user2; +show grant user user2 on table vt2; + +revoke all on view vt2 from user user2; +show grant user user2 on table vt2; + +revoke select on view vt1 from user user2; +show grant user user2 on table vt1; +show grant user user3 on table vt1; diff --git a/ql/src/test/results/clientnegative/authorization_addpartition.q.out b/ql/src/test/results/clientnegative/authorization_addpartition.q.out new file mode 100644 index 0000000..f4d3b4f --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_addpartition.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check add partition without insert privilege +create table tpart(i int, j int) partitioned by (k string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check add partition without insert privilege +create table tpart(i int, j int) partitioned by (k string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tpart +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.tpart] : [INSERT] diff --git a/ql/src/test/results/clientnegative/authorization_createview.q.out b/ql/src/test/results/clientnegative/authorization_createview.q.out new file mode 100644 index 0000000..cb81b83 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_createview.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check create view without select privileges +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check create view without select privileges +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. 
Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [SELECT with grant] diff --git a/ql/src/test/results/clientnegative/authorization_ctas.q.out b/ql/src/test/results/clientnegative/authorization_ctas.q.out new file mode 100644 index 0000000..1070468 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_ctas.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check query without select privilege fails +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check query without select privilege fails +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [SELECT] diff --git a/ql/src/test/results/clientnegative/authorization_droppartition.q.out b/ql/src/test/results/clientnegative/authorization_droppartition.q.out new file mode 100644 index 0000000..7de553b --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_droppartition.q.out @@ -0,0 +1,15 @@ +PREHOOK: query: -- check drop partition without delete privilege +create table tpart(i int, j int) partitioned by (k string) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check drop partition without delete privilege +create table tpart(i int, j int) partitioned by (k string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tpart +#### A masked pattern was here #### +PREHOOK: type: ALTERTABLE_ADDPARTS +PREHOOK: Input: default@tpart +#### A masked pattern was here #### +POSTHOOK: type: ALTERTABLE_ADDPARTS +POSTHOOK: Input: default@tpart +POSTHOOK: Output: default@tpart@k=abc +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.tpart] : [DELETE] diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out index db0c36f..0e17c94 100644 --- a/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out +++ b/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out @@ -19,4 +19,4 @@ PREHOOK: query: -- try grant all to user3, without having all privileges GRANT ALL ON table_priv_allf TO USER user3 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@table_priv_allf -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Hive Object [type=TABLE, dbname=default, table/viewname=table_priv_allf] : [SELECT with grant, UPDATE with grant, DELETE with grant] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. 
Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf] : [SELECT with grant, UPDATE with grant, DELETE with grant] diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out index 156c555..0c83849 100644 --- a/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out +++ b/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out @@ -11,4 +11,4 @@ PREHOOK: query: -- try grant insert to user3 as user2 GRANT INSERT ON table_priv_gfail1 TO USER user3 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@table_priv_gfail1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Hive Object [type=TABLE, dbname=default, table/viewname=table_priv_gfail1] : [INSERT with grant] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1] : [INSERT with grant] diff --git a/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out b/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out index e584e06..129b5fa 100644 --- a/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out +++ b/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out @@ -19,4 +19,4 @@ PREHOOK: query: -- try grant insert to user3 GRANT INSERT ON table_priv_gfail1 TO USER user3 PREHOOK: type: GRANT_PRIVILEGE PREHOOK: Output: default@table_priv_gfail1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Hive Object [type=TABLE, dbname=default, table/viewname=table_priv_gfail1] : [INSERT with grant] +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1] : [INSERT with grant] diff --git a/ql/src/test/results/clientnegative/authorization_insert_noinspriv.q.out b/ql/src/test/results/clientnegative/authorization_insert_noinspriv.q.out new file mode 100644 index 0000000..6d510f1 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_insert_noinspriv.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: -- check insert without select priv +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check insert without select priv +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +PREHOOK: query: create table user2tab(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table user2tab(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@user2tab +FAILED: HiveAccessControlException Permission denied. 
Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [INSERT, DELETE] diff --git a/ql/src/test/results/clientnegative/authorization_insert_noselectpriv.q.out b/ql/src/test/results/clientnegative/authorization_insert_noselectpriv.q.out new file mode 100644 index 0000000..5b9b93a --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_insert_noselectpriv.q.out @@ -0,0 +1,13 @@ +PREHOOK: query: -- check insert without select priv +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check insert without select priv +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +PREHOOK: query: create table t2(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table t2(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t2 +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [SELECT] diff --git a/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_rename.q.out b/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_rename.q.out new file mode 100644 index 0000000..e41702a --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_rename.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check if alter table fails as different user +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check if alter table fails as different user +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [OBJECT OWNERSHIP] diff --git a/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_serdeprop.q.out b/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_serdeprop.q.out new file mode 100644 index 0000000..e41702a --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_serdeprop.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check if alter table fails as different user +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check if alter table fails as different user +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [OBJECT OWNERSHIP] diff --git a/ql/src/test/results/clientnegative/authorization_not_owner_drop_tab.q.out b/ql/src/test/results/clientnegative/authorization_not_owner_drop_tab.q.out new file mode 100644 index 0000000..b456aca --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_not_owner_drop_tab.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check if create table fails as different user +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check if create table fails as different user +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. 
Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [OBJECT OWNERSHIP] diff --git a/ql/src/test/results/clientnegative/authorization_not_owner_drop_view.q.out b/ql/src/test/results/clientnegative/authorization_not_owner_drop_view.q.out new file mode 100644 index 0000000..2433846 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_not_owner_drop_view.q.out @@ -0,0 +1,15 @@ +PREHOOK: query: -- check if create table fails as different user +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check if create table fails as different user +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +PREHOOK: query: create view vt1 as select * from t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@t1 +POSTHOOK: query: create view vt1 as select * from t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@t1 +POSTHOOK: Output: default@vt1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.vt1] : [OBJECT OWNERSHIP] diff --git a/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out b/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out new file mode 100644 index 0000000..f932a3d --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out @@ -0,0 +1,77 @@ +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: -- the test verifies that authorization is happening with privileges of the current roles + +-- grant privileges with grant option for table to role2 +create role role2 +PREHOOK: type: CREATEROLE +POSTHOOK: query: -- the test verifies that authorization is happening with privileges of the current roles + +-- grant privileges with grant option for table to role2 +create role role2 +POSTHOOK: type: CREATEROLE +PREHOOK: query: grant role role2 to user user2 +PREHOOK: type: GRANT_ROLE +POSTHOOK: query: grant role role2 to user user2 +POSTHOOK: type: GRANT_ROLE +PREHOOK: query: create table tpriv_current_role(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: create table tpriv_current_role(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@tpriv_current_role +PREHOOK: query: grant all on table tpriv_current_role to role role2 with grant option +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@tpriv_current_role +POSTHOOK: query: grant all on table tpriv_current_role to role role2 with grant option +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@tpriv_current_role +PREHOOK: query: -- switch to user2 + +-- by default all roles should be in current roles, and grant to new user should work +show current roles +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: -- switch to user2 + +-- by default all roles should be in current roles, and grant to new user should work +show current roles +POSTHOOK: type: SHOW_ROLES +role2 +PUBLIC + +PREHOOK: query: grant all on table tpriv_current_role to user user3 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@tpriv_current_role +POSTHOOK: query: grant all on table tpriv_current_role to user user3 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@tpriv_current_role +PREHOOK: query: set role role2 +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role role2 +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: -- switch to 
role2, grant should work +grant all on table tpriv_current_role to user user4 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@tpriv_current_role +POSTHOOK: query: -- switch to role2, grant should work +grant all on table tpriv_current_role to user user4 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@tpriv_current_role +PREHOOK: query: show grant user user4 on table tpriv_current_role +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user4 on table tpriv_current_role +POSTHOOK: type: SHOW_GRANT +default tpriv_current_role user4 USER DELETE false -1 user2 +default tpriv_current_role user4 USER INSERT false -1 user2 +default tpriv_current_role user4 USER SELECT false -1 user2 +default tpriv_current_role user4 USER UPDATE false -1 user2 +PREHOOK: query: set role PUBLIC +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role PUBLIC +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: -- set role to public, should fail as role2 is not one of the current roles +grant all on table tpriv_current_role to user user5 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@tpriv_current_role +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.tpriv_current_role] : [SELECT with grant, INSERT with grant, UPDATE with grant, DELETE with grant] diff --git a/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out b/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out index 696f29b..0f4c966 100644 --- a/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out +++ b/ql/src/test/results/clientnegative/authorization_revoke_table_fail1.q.out @@ -19,5 +19,5 @@ PREHOOK: query: -- try dropping the privilege as user3 REVOKE INSERT ON TABLE table_priv_rfail1 FROM USER user2 PREHOOK: type: REVOKE_PRIVILEGE PREHOOK: Output: default@table_priv_rfail1 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Hive Object [type=TABLE, dbname=default, table/viewname=table_priv_rfail1] granted by user3 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfail1] granted by user3 diff --git a/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out b/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out index 5fbfd17..c671c8a 100644 --- a/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out +++ b/ql/src/test/results/clientnegative/authorization_revoke_table_fail2.q.out @@ -33,5 +33,5 @@ PREHOOK: query: -- try dropping the privilege as user3 REVOKE INSERT ON TABLE table_priv_rfai2 FROM USER user2 PREHOOK: type: REVOKE_PRIVILEGE PREHOOK: Output: default@table_priv_rfai2 -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Hive Object [type=TABLE, dbname=default, table/viewname=table_priv_rfai2] granted by user3 +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
Cannot find privilege Privilege [name=INSERT, columns=null] for Principal [name=user2, type=USER] on Object [type=TABLE_OR_VIEW, name=default.table_priv_rfai2] granted by user3 diff --git a/ql/src/test/results/clientnegative/authorization_select.q.out b/ql/src/test/results/clientnegative/authorization_select.q.out new file mode 100644 index 0000000..1070468 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_select.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check query without select privilege fails +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check query without select privilege fails +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [SELECT] diff --git a/ql/src/test/results/clientnegative/authorization_select_view.q.out b/ql/src/test/results/clientnegative/authorization_select_view.q.out new file mode 100644 index 0000000..e70a79c --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_select_view.q.out @@ -0,0 +1,15 @@ +PREHOOK: query: -- check create view without select privileges +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check create view without select privileges +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +PREHOOK: query: create view v1 as select * from t1 +PREHOOK: type: CREATEVIEW +PREHOOK: Input: default@t1 +POSTHOOK: query: create view v1 as select * from t1 +POSTHOOK: type: CREATEVIEW +POSTHOOK: Input: default@t1 +POSTHOOK: Output: default@v1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.v1] : [SELECT] diff --git a/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out b/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out new file mode 100644 index 0000000..b1c647d --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_set_role_neg1.q.out @@ -0,0 +1,5 @@ +PREHOOK: query: -- an error should be thrown if 'set role ' is done for role that does not exist + +set role nosuchroleexists +PREHOOK: type: SHOW_ROLES +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. hive_test_user doesn't belong to role nosuchroleexists diff --git a/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out b/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out new file mode 100644 index 0000000..eec684d --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_set_role_neg2.q.out @@ -0,0 +1,19 @@ +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: -- an error should be thrown if 'set role ' is done for role that does not exist + +create role rset_role_neg +PREHOOK: type: CREATEROLE +POSTHOOK: query: -- an error should be thrown if 'set role ' is done for role that does not exist + +create role rset_role_neg +POSTHOOK: type: CREATEROLE +PREHOOK: query: grant role rset_role_neg to user user2 +PREHOOK: type: GRANT_ROLE +POSTHOOK: query: grant role rset_role_neg to user user2 +POSTHOOK: type: GRANT_ROLE +PREHOOK: query: set role rset_role_neg +PREHOOK: type: SHOW_ROLES +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. 
hive_admin_user doesn't belong to role rset_role_neg diff --git a/ql/src/test/results/clientnegative/authorization_truncate.q.out b/ql/src/test/results/clientnegative/authorization_truncate.q.out new file mode 100644 index 0000000..c188831 --- /dev/null +++ b/ql/src/test/results/clientnegative/authorization_truncate.q.out @@ -0,0 +1,8 @@ +PREHOOK: query: -- check add partition without insert privilege +create table t1(i int, j int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- check add partition without insert privilege +create table t1(i int, j int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] : [OBJECT OWNERSHIP] diff --git a/ql/src/test/results/clientpositive/authorization_1_sql_std.q.out b/ql/src/test/results/clientpositive/authorization_1_sql_std.q.out index 9d08027..a219478 100644 --- a/ql/src/test/results/clientpositive/authorization_1_sql_std.q.out +++ b/ql/src/test/results/clientpositive/authorization_1_sql_std.q.out @@ -46,8 +46,8 @@ PREHOOK: query: show role grant user user_sauth PREHOOK: type: SHOW_ROLE_GRANT POSTHOOK: query: show role grant user user_sauth POSTHOOK: type: SHOW_ROLE_GRANT -src_role -1 user_sauth USER false -1 hive_admin_user PUBLIC -1 false -1 +src_role -1 user_sauth USER false -1 hive_admin_user PREHOOK: query: --table grant to role grant select on table src_autho_test to role src_role diff --git a/ql/src/test/results/clientpositive/authorization_admin_almighty1.q.out b/ql/src/test/results/clientpositive/authorization_admin_almighty1.q.out new file mode 100644 index 0000000..6fc4897 --- /dev/null +++ b/ql/src/test/results/clientpositive/authorization_admin_almighty1.q.out @@ -0,0 +1,55 @@ +PREHOOK: query: -- actions from admin should work as if admin has all privileges + +create table t1(i int) +PREHOOK: type: CREATETABLE +POSTHOOK: query: -- actions from admin should work as if admin has all privileges + +create table t1(i int) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: default@t1 +PREHOOK: query: show current roles +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: show current roles +POSTHOOK: type: SHOW_ROLES +PUBLIC + +PREHOOK: query: set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: show current roles +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: show current roles +POSTHOOK: type: SHOW_ROLES +ADMIN + +PREHOOK: query: select * from t1 +PREHOOK: type: QUERY +PREHOOK: Input: default@t1 +#### A masked pattern was here #### +POSTHOOK: query: select * from t1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@t1 +#### A masked pattern was here #### +PREHOOK: query: grant all on table t1 to user user1 +PREHOOK: type: GRANT_PRIVILEGE +PREHOOK: Output: default@t1 +POSTHOOK: query: grant all on table t1 to user user1 +POSTHOOK: type: GRANT_PRIVILEGE +POSTHOOK: Output: default@t1 +PREHOOK: query: show grant user user1 on table t1 +PREHOOK: type: SHOW_GRANT +POSTHOOK: query: show grant user user1 on table t1 +POSTHOOK: type: SHOW_GRANT +default t1 user1 USER DELETE false -1 hive_admin_user +default t1 user1 USER INSERT false -1 hive_admin_user +default t1 user1 USER SELECT false -1 hive_admin_user +default t1 user1 USER UPDATE false -1 hive_admin_user +PREHOOK: query: drop table t1 +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@t1 +PREHOOK: Output: default@t1 +POSTHOOK: query: drop table t1 +POSTHOOK: type: 
DROPTABLE
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@t1
diff --git a/ql/src/test/results/clientpositive/authorization_owner_actions.q.out b/ql/src/test/results/clientpositive/authorization_owner_actions.q.out
new file mode 100644
index 0000000..92b8c62
--- /dev/null
+++ b/ql/src/test/results/clientpositive/authorization_owner_actions.q.out
@@ -0,0 +1,52 @@
+#### A masked pattern was here ####
+create table t1(i int)
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+create table t1(i int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: ALTER TABLE t1 SET SERDEPROPERTIES ('field.delim' = ',')
+PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
+PREHOOK: Input: default@t1
+PREHOOK: Output: default@t1
+POSTHOOK: query: ALTER TABLE t1 SET SERDEPROPERTIES ('field.delim' = ',')
+POSTHOOK: type: ALTERTABLE_SERDEPROPERTIES
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@t1
+PREHOOK: query: drop table t1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t1
+PREHOOK: Output: default@t1
+POSTHOOK: query: drop table t1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@t1
+PREHOOK: query: create table t1(i int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t1(i int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: create view vt1 as select * from t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@t1
+POSTHOOK: query: create view vt1 as select * from t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@vt1
+PREHOOK: query: drop view vt1
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@vt1
+PREHOOK: Output: default@vt1
+POSTHOOK: query: drop view vt1
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@vt1
+POSTHOOK: Output: default@vt1
+PREHOOK: query: alter table t1 rename to tnew1
+PREHOOK: type: ALTERTABLE_RENAME
+PREHOOK: Input: default@t1
+PREHOOK: Output: default@t1
+POSTHOOK: query: alter table t1 rename to tnew1
+POSTHOOK: type: ALTERTABLE_RENAME
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@t1
+POSTHOOK: Output: default@tnew1
diff --git a/ql/src/test/results/clientpositive/authorization_revoke_table_priv.q.out b/ql/src/test/results/clientpositive/authorization_revoke_table_priv.q.out
index 7ea601d..ae7e716 100644
--- a/ql/src/test/results/clientpositive/authorization_revoke_table_priv.q.out
+++ b/ql/src/test/results/clientpositive/authorization_revoke_table_priv.q.out
@@ -1,10 +1,10 @@
 PREHOOK: query: -- current user has been set (comment line before the set cmd is resulting in parse error!!)
-CREATE TABLE table_priv_rev(i int) 
+CREATE TABLE table_priv_rev(i int)
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: -- current user has been set (comment line before the set cmd is resulting in parse error!!)
-CREATE TABLE table_priv_rev(i int) 
+CREATE TABLE table_priv_rev(i int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@table_priv_rev
 PREHOOK: query: -- grant insert privilege to user2
@@ -74,8 +74,8 @@ PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
 POSTHOOK: type: SHOW_GRANT
 default table_priv_rev user2 USER INSERT false -1 user1
-default table_priv_rev user2 USER UPDATE false -1 user1
 default table_priv_rev user2 USER SELECT true -1 user1
+default table_priv_rev user2 USER UPDATE false -1 user1
 PREHOOK: query: -- grant delete privilege to user2
 GRANT DELETE ON table_priv_rev TO USER user2
 PREHOOK: type: GRANT_PRIVILEGE
@@ -90,8 +90,8 @@ POSTHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
 POSTHOOK: type: SHOW_GRANT
 default table_priv_rev user2 USER DELETE false -1 user1
 default table_priv_rev user2 USER INSERT false -1 user1
-default table_priv_rev user2 USER UPDATE false -1 user1
 default table_priv_rev user2 USER SELECT true -1 user1
+default table_priv_rev user2 USER UPDATE false -1 user1
 PREHOOK: query: -- start revoking --
 -- revoke update privilege from user2
 REVOKE UPDATE ON TABLE table_priv_rev FROM USER user2
@@ -148,3 +148,29 @@ PREHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
 PREHOOK: type: SHOW_GRANT
 POSTHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
 POSTHOOK: type: SHOW_GRANT
+PREHOOK: query: -- grant all followed by revoke all
+GRANT ALL ON table_priv_rev TO USER user2
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@table_priv_rev
+POSTHOOK: query: -- grant all followed by revoke all
+GRANT ALL ON table_priv_rev TO USER user2
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@table_priv_rev
+PREHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
+POSTHOOK: type: SHOW_GRANT
+default table_priv_rev user2 USER DELETE false -1 user1
+default table_priv_rev user2 USER INSERT false -1 user1
+default table_priv_rev user2 USER SELECT false -1 user1
+default table_priv_rev user2 USER UPDATE false -1 user1
+PREHOOK: query: REVOKE ALL ON TABLE table_priv_rev FROM USER user2
+PREHOOK: type: REVOKE_PRIVILEGE
+PREHOOK: Output: default@table_priv_rev
+POSTHOOK: query: REVOKE ALL ON TABLE table_priv_rev FROM USER user2
+POSTHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: Output: default@table_priv_rev
+PREHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: SHOW GRANT USER user2 ON TABLE table_priv_rev
+POSTHOOK: type: SHOW_GRANT
diff --git a/ql/src/test/results/clientpositive/authorization_role_grant1.q.out b/ql/src/test/results/clientpositive/authorization_role_grant1.q.out
index 10ef46b..48e0f59 100644
--- a/ql/src/test/results/clientpositive/authorization_role_grant1.q.out
+++ b/ql/src/test/results/clientpositive/authorization_role_grant1.q.out
@@ -18,8 +18,8 @@ PREHOOK: query: show role grant user user2
 PREHOOK: type: SHOW_ROLE_GRANT
 POSTHOOK: query: show role grant user user2
 POSTHOOK: type: SHOW_ROLE_GRANT
-src_role2 -1 user2 USER false -1 hive_admin_user
 PUBLIC -1 false -1
+src_role2 -1 user2 USER false -1 hive_admin_user
 PREHOOK: query: show roles
 PREHOOK: type: SHOW_ROLES
 POSTHOOK: query: show roles
@@ -67,8 +67,8 @@ PREHOOK: query: show role grant user user2
 PREHOOK: type: SHOW_ROLE_GRANT
 POSTHOOK: query: show role grant user user2
 POSTHOOK: type: SHOW_ROLE_GRANT
-src_role_wadmin -1 user2 USER true -1 hive_admin_user
 PUBLIC -1 false -1
+src_role_wadmin -1 user2 USER true -1 hive_admin_user
 PREHOOK: query: -- revoke role without role keyword
 revoke src_role_wadmin from user user2
 PREHOOK: type: REVOKE_ROLE
diff --git a/ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out b/ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out
new file mode 100644
index 0000000..3bbb015
--- /dev/null
+++ b/ql/src/test/results/clientpositive/authorization_view_sqlstd.q.out
@@ -0,0 +1,98 @@
+PREHOOK: query: create table t1(i int, j int, k int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table t1(i int, j int, k int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t1
+PREHOOK: query: -- protecting certain columns
+create view vt1 as select i,k from t1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@t1
+POSTHOOK: query: -- protecting certain columns
+create view vt1 as select i,k from t1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@vt1
+PREHOOK: query: -- protecting certain rows
+create view vt2 as select * from t1 where i > 1
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@t1
+POSTHOOK: query: -- protecting certain rows
+create view vt2 as select * from t1 where i > 1
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@t1
+POSTHOOK: Output: default@vt2
+PREHOOK: query: --view grant to user
+
+grant select on view vt1 to user user2
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@vt1
+POSTHOOK: query: --view grant to user
+
+grant select on view vt1 to user user2
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@vt1
+PREHOOK: query: grant insert on view vt1 to user user3
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@vt1
+POSTHOOK: query: grant insert on view vt1 to user user3
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@vt1
+PREHOOK: query: show grant user user2 on table vt1
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user user2 on table vt1
+POSTHOOK: type: SHOW_GRANT
+default vt1 user2 USER SELECT false -1 user1
+PREHOOK: query: show grant user user3 on table vt1
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user user3 on table vt1
+POSTHOOK: type: SHOW_GRANT
+default vt1 user3 USER INSERT false -1 user1
+PREHOOK: query: select * from vt1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t1
+PREHOOK: Input: default@vt1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from vt1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t1
+POSTHOOK: Input: default@vt1
+#### A masked pattern was here ####
+PREHOOK: query: grant all on view vt2 to user user2
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@vt2
+POSTHOOK: query: grant all on view vt2 to user user2
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@vt2
+PREHOOK: query: show grant user user2 on table vt2
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user user2 on table vt2
+POSTHOOK: type: SHOW_GRANT
+default vt2 user2 USER DELETE false -1 user1
+default vt2 user2 USER INSERT false -1 user1
+default vt2 user2 USER SELECT false -1 user1
+default vt2 user2 USER UPDATE false -1 user1
+PREHOOK: query: revoke all on view vt2 from user user2
+PREHOOK: type: REVOKE_PRIVILEGE
+PREHOOK: Output: default@vt2
+POSTHOOK: query: revoke all on view vt2 from user user2
+POSTHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: Output: default@vt2
+PREHOOK: query: show grant user user2 on table vt2
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user user2 on table vt2
+POSTHOOK: type: SHOW_GRANT
+PREHOOK: query: revoke select on view vt1 from user user2
+PREHOOK: type: REVOKE_PRIVILEGE
+PREHOOK: Output: default@vt1
+POSTHOOK: query: revoke select on view vt1 from user user2
+POSTHOOK: type: REVOKE_PRIVILEGE
+POSTHOOK: Output: default@vt1
+PREHOOK: query: show grant user user2 on table vt1
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user user2 on table vt1
+POSTHOOK: type: SHOW_GRANT
+PREHOOK: query: show grant user user3 on table vt1
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user user3 on table vt1
+POSTHOOK: type: SHOW_GRANT
+default vt1 user3 USER INSERT false -1 user1