diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 65744ac..d3456cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -96,12 +96,10 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.QueryContext;
 import org.apache.hadoop.hive.ql.session.OperationLog;
 import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
@@ -842,7 +840,7 @@ private static void doAuthorizationV2(SessionState ss, HiveOperation op, Set<ReadEntity> inputs,
       Set<WriteEntity> outputs, List<String> colNames, Map<String, List<String>> tab2cols,
       Map<String, List<String>> updateTab2Cols) throws HiveException {
     /* comment for reviewers -> updateTab2Cols needed to be separate from tab2cols because if I
-    pass tab2cols to getHivePrivObjects for the output case it will trip up insert/selects,
+    pass tab2cols to getHivePrivObjectsFromEntity for the output case it will trip up insert/selects,
     since the insert will get passed the columns from the select.
      */
@@ -851,74 +849,16 @@ private static void doAuthorizationV2(SessionState ss, HiveOperation op, Set<ReadEntity> inputs,
-    List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs, tab2cols);
-    List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs, updateTab2Cols);
-
-    ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs, authzContextBuilder.build());
-  }
-
-  private static List<HivePrivilegeObject> getHivePrivObjects(
-      Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
-    List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
-    if(privObjects == null){
-      return hivePrivobjs;
-    }
-    for(Entity privObject : privObjects){
-      HivePrivilegeObjectType privObjType =
-          AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
-      if(privObject.isDummy()) {
-        //do not authorize dummy readEntity or writeEntity
-        continue;
-      }
-      if(privObject instanceof ReadEntity && !((ReadEntity)privObject).isDirect()){
-        // In case of views, the underlying views or tables are not direct dependencies
-        // and are not used for authorization checks.
-        // This ReadEntity represents one of the underlying tables/views, so skip it.
-        // See description of the isDirect in ReadEntity
-        continue;
-      }
-      if(privObject instanceof WriteEntity && ((WriteEntity)privObject).isTempURI()){
-        //do not authorize temporary uris
-        continue;
-      }
-      //support for authorization on partitions needs to be added
-      String dbname = null;
-      String objName = null;
-      List<String> partKeys = null;
-      List<String> columns = null;
-      switch(privObject.getType()){
-      case DATABASE:
-        dbname = privObject.getDatabase().getName();
-        break;
-      case TABLE:
-        dbname = privObject.getTable().getDbName();
-        objName = privObject.getTable().getTableName();
-        columns = tableName2Cols == null ? null :
-            tableName2Cols.get(Table.getCompleteName(dbname, objName));
-        break;
-      case DFS_DIR:
-      case LOCAL_DIR:
-        objName = privObject.getD().toString();
-        break;
-      case FUNCTION:
-        if(privObject.getDatabase() != null) {
-          dbname = privObject.getDatabase().getName();
-        }
-        objName = privObject.getFunctionName();
-        break;
-      case DUMMYPARTITION:
-      case PARTITION:
-        // not currently handled
-        continue;
-      default:
-        throw new AssertionError("Unexpected object type");
-      }
-      HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
-      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName,
-          partKeys, columns, actionType, null);
-      hivePrivobjs.add(hPrivObject);
-    }
-    return hivePrivobjs;
+    HiveAuthorizationTranslator translator =
+        (HiveAuthorizationTranslator) ss.getAuthorizerV2()
+            .getHiveAuthorizationTranslator();
+    List<HivePrivilegeObject> inputsHObjs =
+        translator.getHivePrivObjectsFromEntity(inputs, tab2cols);
+    List<HivePrivilegeObject> outputHObjs =
+        translator.getHivePrivObjectsFromEntity(outputs, updateTab2Cols);
+
+    ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs,
+        authzContextBuilder.build());
   }
 
   private static HiveOperationType getHiveOperationType(HiveOperation op) {
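Note on the Driver change: getHiveAuthorizationTranslator() is declared on the authorizer to return Object (hence the cast above), and an authorizer may not supply a custom translator at all. A minimal sketch of a null-safe lookup, assuming a fallback to the default translator is acceptable; the TranslatorLookup class and getTranslator helper are illustrative and not part of this patch:

```java
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;

final class TranslatorLookup {
  // Hypothetical helper, not part of this patch: use the authorizer's
  // translator when one is provided, otherwise fall back to the default.
  static HiveAuthorizationTranslator getTranslator(HiveAuthorizer authorizer)
      throws HiveException {
    Object candidate = authorizer.getHiveAuthorizationTranslator();
    return candidate == null
        ? new DefaultHiveAuthorizationTranslator()
        : (HiveAuthorizationTranslator) candidate;
  }
}
```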
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 174b5a8..c999eb3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -41,7 +41,7 @@
    * The type of the entity.
    */
   public static enum Type {
-    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION
+    GLOBAL, DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION
   }
 
   /**
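With GLOBAL added to Entity.Type, any switch over the enum needs a matching branch, most notably the type mapping the translator relies on (AuthorizationUtils.getHivePrivilegeObjectType in this patch). A minimal sketch of such a mapping, assuming the plugin-side enum HivePrivilegeObject.HivePrivilegeObjectType carries a corresponding GLOBAL constant; the TypeMappingSketch class is illustrative only, not part of this patch:

```java
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

final class TypeMappingSketch {
  // Illustrative only; the real mapping lives in AuthorizationUtils.
  static HivePrivilegeObjectType toPrivilegeObjectType(Entity.Type type) {
    switch (type) {
    case GLOBAL:
      return HivePrivilegeObjectType.GLOBAL; // assumes a GLOBAL constant on the plugin side
    case DATABASE:
      return HivePrivilegeObjectType.DATABASE;
    case TABLE:
      return HivePrivilegeObjectType.TABLE_OR_VIEW;
    case DFS_DIR:
      return HivePrivilegeObjectType.DFS_URI;
    case LOCAL_DIR:
      return HivePrivilegeObjectType.LOCAL_URI;
    case FUNCTION:
      return HivePrivilegeObjectType.FUNCTION;
    default:
      return null; // PARTITION and DUMMYPARTITION are skipped by the translator
    }
  }
}
```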
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
index 319a801..d1259be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationTranslator.java
@@ -19,9 +19,15 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
@@ -77,5 +83,69 @@ public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubject)
     return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], partSpec, columns, null);
   }
-
+  @Override
+  public List<HivePrivilegeObject> getHivePrivObjectsFromEntity(
+      Set<? extends Entity> privObjects,
+      Map<String, List<String>> tableName2Cols) {
+    List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
+    if(privObjects == null){
+      return hivePrivobjs;
+    }
+    for(Entity privObject : privObjects){
+      HivePrivilegeObjectType privObjType =
+          AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
+      if(privObject.isDummy()) {
+        //do not authorize dummy readEntity or writeEntity
+        continue;
+      }
+      if(privObject instanceof ReadEntity && !((ReadEntity)privObject).isDirect()){
+        // In case of views, the underlying views or tables are not direct dependencies
+        // and are not used for authorization checks.
+        // This ReadEntity represents one of the underlying tables/views, so skip it.
+        // See description of the isDirect in ReadEntity
+        continue;
+      }
+      if(privObject instanceof WriteEntity && ((WriteEntity)privObject).isTempURI()){
+        //do not authorize temporary uris
+        continue;
+      }
+      //support for authorization on partitions needs to be added
+      String dbname = null;
+      String objName = null;
+      List<String> partKeys = null;
+      List<String> columns = null;
+      switch(privObject.getType()){
+      case DATABASE:
+        dbname = privObject.getDatabase().getName();
+        break;
+      case TABLE:
+        dbname = privObject.getTable().getDbName();
+        objName = privObject.getTable().getTableName();
+        columns = tableName2Cols == null ? null :
+            tableName2Cols.get(Table.getCompleteName(dbname, objName));
+        break;
+      case DFS_DIR:
+      case LOCAL_DIR:
+        objName = privObject.getD().toString();
+        break;
+      case FUNCTION:
+        if(privObject.getDatabase() != null) {
+          dbname = privObject.getDatabase().getName();
+        }
+        objName = privObject.getFunctionName();
+        break;
+      case DUMMYPARTITION:
+      case PARTITION:
+        // not currently handled
+        continue;
+      default:
+        throw new AssertionError("Unexpected object type");
+      }
+      HivePrivilegeObject.HivePrivObjectActionType actionType =
+          AuthorizationUtils.getActionType(privObject);
+      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName,
+          partKeys, columns, actionType, null);
+      hivePrivobjs.add(hPrivObject);
+    }
+    return hivePrivobjs;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java
index 540f1f3..f92bd6b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationTranslator.java
@@ -17,8 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
@@ -32,7 +37,7 @@
  * The API uses several classes that are considered internal to Hive, and it is
  * subject to change across releases.
  */
-@LimitedPrivate(value = { "Apache Sentry (incubating)" })
+@LimitedPrivate(value = { "Apache Sentry" })
 @Evolving
 public interface HiveAuthorizationTranslator {
 
@@ -43,4 +48,8 @@ public HivePrincipal getHivePrincipal(PrincipalDesc principal)
 
   public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privObject)
       throws HiveException;
+
+  public List<HivePrivilegeObject> getHivePrivObjectsFromEntity(
+      Set<? extends Entity> privObjects,
+      Map<String, List<String>> tableName2Cols);
 }
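The new interface method is what lets the @LimitedPrivate audience (Apache Sentry) substitute its own Entity translation in place of the logic that previously lived in Driver. A minimal sketch of what an external implementation might look like, assuming it extends DefaultHiveAuthorizationTranslator to reuse the stock conversion; the class name and the post-processing step are hypothetical, not part of this patch:

```java
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

// Hypothetical external translator, e.g. what a Sentry binding might register.
public class CustomAuthorizationTranslator extends DefaultHiveAuthorizationTranslator {
  @Override
  public List<HivePrivilegeObject> getHivePrivObjectsFromEntity(
      Set<? extends Entity> privObjects,
      Map<String, List<String>> tableName2Cols) {
    // Reuse the default Entity-to-HivePrivilegeObject conversion ...
    List<HivePrivilegeObject> objects =
        super.getHivePrivObjectsFromEntity(privObjects, tableName2Cols);
    // ... then apply plugin-specific filtering or augmentation here.
    return objects;
  }
}
```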