diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 72c04d3..bd95161 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -55,6 +55,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.history.HiveHistory.Keys; import org.apache.hadoop.hive.ql.hooks.Entity; +import org.apache.hadoop.hive.ql.hooks.Entity.Type; import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext; import org.apache.hadoop.hive.ql.hooks.Hook; import org.apache.hadoop.hive.ql.hooks.HookContext; @@ -101,6 +102,9 @@ import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.processors.CommandProcessor; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.serde2.ByteStream; @@ -523,6 +527,11 @@ private void doAuthorization(BaseSemanticAnalyzer sem) SessionState ss = SessionState.get(); HiveOperation op = ss.getHiveOperation(); Hive db = sem.getDb(); + if(ss.isAuthorizationModeV2()){ + doAuthorizationV2(ss, op, inputs, outputs); + return; + } + if (op != null) { if (op.equals(HiveOperation.CREATEDATABASE)) { ss.getAuthorizer().authorize( @@ -543,6 +552,7 @@ private void doAuthorization(BaseSemanticAnalyzer sem) } } if (outputs != null && outputs.size() > 0) { + //do authorization for each output for (WriteEntity write : outputs) { if (write.getType() == Entity.Type.DATABASE) { ss.getAuthorizer().authorize(write.getDatabase(), @@ -570,10 +580,10 @@ private void 
doAuthorization(BaseSemanticAnalyzer sem) } if (inputs != null && inputs.size() > 0) { - Map> tab2Cols = new HashMap>(); Map> part2Cols = new HashMap>(); + //determine if partition level privileges should be checked for input tables Map tableUsePartLevelAuth = new HashMap(); for (ReadEntity read : inputs) { if (read.getType() == Entity.Type.DATABASE) { @@ -596,6 +606,8 @@ private void doAuthorization(BaseSemanticAnalyzer sem) } } + //for a select or create-as-select query, populate the partition to column (part2Cols) or + // table to columns mapping (tab2Cols) if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.QUERY)) { SemanticAnalyzer querySem = (SemanticAnalyzer) sem; @@ -691,6 +703,49 @@ private void doAuthorization(BaseSemanticAnalyzer sem) } } + private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet inputs, + HashSet outputs) { + HiveOperationType hiveOpType = getHiveOperationType(op); + List inputsHObjs = getHivePrivObjects(inputs); + List outputHObjs = getHivePrivObjects(outputs); + ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs); + return; + } + + private List getHivePrivObjects(HashSet inputs) { + List hivePrivobjs = new ArrayList(); + for(Entity input : inputs){ + HivePrivilegeObjectType privObjType = getHivePrivilegeObjectType(input.getType()); + //support for authorization on partitions or uri needs to be added + HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, + input.getDatabase().getName(), + input.getTable().getTableName()); + hivePrivobjs.add(hPrivObject); + } + return hivePrivobjs; + } + + private HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) { + switch(type){ + case DATABASE: + return HivePrivilegeObjectType.DATABASE; + case TABLE: + return HivePrivilegeObjectType.TABLE; + case LOCAL_DIR: + case DFS_DIR: + return HivePrivilegeObjectType.URI; + case PARTITION: + case DUMMYPARTITION: //need to determine if a different type is needed for
dummy partitions + return HivePrivilegeObjectType.PARTITION; + default: + return null; + } + } + + private HiveOperationType getHiveOperationType(HiveOperation op) { + return HiveOperationType.valueOf(op.name()); + } + /** * @return The current query plan associated with this Driver, if any. */ diff --git ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java index b36a4ca..f8be581 100644 --- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java +++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java @@ -45,7 +45,7 @@ // 30000 to 39999: Runtime errors which Hive thinks may be transient and retrying may succeed. // 40000 to 49999: Errors where Hive is unable to advise about retries. // In addition to the error code, ErrorMsg also has a SQLState field. - // SQLStates are taken from Section 12.5 of ISO-9075. + // SQLStates are taken from Section 22.1 of ISO-9075. // See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt // Most will just rollup to the generic syntax error state of 42000, but // specific errors can override the that state. @@ -53,6 +53,7 @@ // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html GENERIC_ERROR(40000, "Exception while processing"), + //========================== 10000 range starts here ========================// INVALID_TABLE(10001, "Table not found", "42S02"), INVALID_COLUMN(10002, "Invalid column reference"), INVALID_INDEX(10003, "Invalid index"), @@ -370,7 +371,11 @@ INVALID_DIR(10252, "{0} is not a directory", true), NO_VALID_LOCATIONS(10253, "Could not find any valid location to place the jars. 
" + "Please update hive.jar.directory or hive.user.install.directory with a valid location", false), + UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254, + "Principal type GROUP is not supported in this authorization setting", "28000"), + INVALID_TABLE_NAME(10255, "Invalid table name {0}", true), + //========================== 20000 range starts here ========================// SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."), SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. " + "It may have crashed with an error."), @@ -382,6 +387,7 @@ "tried to create too many dynamic partitions. The maximum number of dynamic partitions " + "is controlled by hive.exec.max.dynamic.partitions and hive.exec.max.dynamic.partitions.pernode. "), + //========================== 30000 range starts here ========================// STATSPUBLISHER_NOT_OBTAINED(30000, "StatsPublisher cannot be obtained. " + "There was a error to retrieve the StatsPublisher, and retrying " + "might help. 
If you dont want the query to fail because accurate statistics " + diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index 617bba8..fab8149 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -156,6 +156,12 @@ import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.Privilege; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; @@ -398,7 +404,8 @@ public int execute(DriverContext driverContext) { GrantDesc grantDesc = work.getGrantDesc(); if (grantDesc != null) { return grantOrRevokePrivileges(grantDesc.getPrincipals(), grantDesc - .getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), grantDesc.getGrantor(), grantDesc.getGrantorType(), grantDesc.isGrantOption(), true); + .getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), grantDesc.getGrantor(), + grantDesc.getGrantorType(), grantDesc.isGrantOption(), true); } RevokeDesc revokeDesc = work.getRevokeDesc(); @@ -488,6 +495,11 @@ private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL) boolean grantRole = grantOrRevokeRoleDDL.getGrant(); List principals = grantOrRevokeRoleDDL.getPrincipalDesc(); List roles = 
grantOrRevokeRoleDDL.getRoles(); + + if(SessionState.get().isAuthorizationModeV2()){ + return grantOrRevokeRoleV2(grantOrRevokeRoleDDL); + } + for (PrincipalDesc principal : principals) { String userName = principal.getName(); for (String roleName : roles) { @@ -506,6 +518,28 @@ private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL) return 0; } + private int grantOrRevokeRoleV2(GrantRevokeRoleDDL grantOrRevokeRoleDDL) throws HiveException { + HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); + //convert to the types needed for plugin api + HivePrincipal grantorPrinc = null; + if(grantOrRevokeRoleDDL.getGrantor() != null){ + grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(), + getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType())); + } + List hivePrincipals = getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc()); + List roles = grantOrRevokeRoleDDL.getRoles(); + + if(grantOrRevokeRoleDDL.getGrant()){ + authorizer.grantRole(hivePrincipals, roles, + grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc); + } + else{ + authorizer.revokeRole(hivePrincipals, roles, + grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc); + } + return 0; + } + private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException { StringBuilder builder = new StringBuilder(); try { @@ -513,6 +547,7 @@ private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException { PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj(); String principalName = principalDesc.getName(); if (hiveObjectDesc == null) { + //show all privileges for this user List users = db.showPrivilegeGrant( HiveObjectType.GLOBAL, principalName, principalDesc.getType(), null, null, null, null); @@ -618,7 +653,14 @@ public int compare(HiveObjectPrivilege one, HiveObjectPrivilege other) { private int grantOrRevokePrivileges(List principals, List privileges, PrivilegeObjectDesc privSubjectDesc, - String grantor, PrincipalType grantorType, boolean 
grantOption, boolean isGrant) { + String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant) + throws HiveException { + + if(SessionState.get().isAuthorizationModeV2()){ + return grantOrRevokePrivilegesV2(principals, privileges, privSubjectDesc, grantor, + grantorType, grantOption, isGrant); + } + if (privileges == null || privileges.size() == 0) { console.printError("No privilege found."); return 1; @@ -637,15 +679,12 @@ private int grantOrRevokePrivileges(List principals, } String obj = privSubjectDesc.getObject(); + //get the db, table objects if (privSubjectDesc.getTable()) { - String[] dbTab = obj.split("\\."); - if (dbTab.length == 2) { - dbName = dbTab[0]; - tableName = dbTab[1]; - } else { - dbName = SessionState.get().getCurrentDatabase(); - tableName = obj; - } + String[] dbTable = Utilities.getDbTableName(obj); + dbName = dbTable[0]; + tableName = dbTable[1]; + dbObj = db.getDatabase(dbName); if (dbObj == null) { throwNotFound("Database", dbName); @@ -756,13 +795,83 @@ private int grantOrRevokePrivileges(List principals, return 0; } + private int grantOrRevokePrivilegesV2(List principals, + List privileges, PrivilegeObjectDesc privSubjectDesc, String grantor, + PrincipalType grantorType, boolean grantOption, boolean isGrant) throws HiveException { + HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); + + //Convert to object types used by the authorization plugin interface + List hivePrincipals = getHivePrincipals(principals); + List hivePrivileges = getHivePrivileges(privileges); + HivePrivilegeObject hivePrivObject = getHivePrivilegeObject(privSubjectDesc); + HivePrincipal grantorPrincipal = new HivePrincipal(grantor, getHivePrincipalType(grantorType)); + + if(isGrant){ + authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject, + grantorPrincipal, grantOption); + }else { + authorizer.revokePrivileges(hivePrincipals, hivePrivileges, + hivePrivObject, grantorPrincipal, grantOption); + } + //no 
exception thrown, so looks good + return 0; + } + + private HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc) + throws HiveException { + String [] dbTable = Utilities.getDbTableName(privSubjectDesc.getObject()); + return new HivePrivilegeObject(getPrivObjectType(privSubjectDesc), dbTable[0], dbTable[1]); + } + + private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) { + //TODO: This needs to change to support view once view grant/revoke is supported as + // part of HIVE-6181 + return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE : HivePrivilegeObjectType.DATABASE; + } + + private List getHivePrivileges(List privileges) { + List hivePrivileges = new ArrayList(); + for(PrivilegeDesc privilege : privileges){ + hivePrivileges.add( + new HivePrivilege(privilege.getPrivilege().toString(), privilege.getColumns())); + } + return hivePrivileges; + } + + private List getHivePrincipals(List principals) throws HiveException { + ArrayList hivePrincipals = new ArrayList(); + for(PrincipalDesc principal : principals){ + hivePrincipals.add( + new HivePrincipal(principal.getName(), getHivePrincipalType(principal.getType()))); + } + return hivePrincipals; + } + + private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException { + switch(type){ + case USER: + return HivePrincipalType.USER; + case ROLE: + return HivePrincipalType.ROLE; + case GROUP: + throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP); + default: + //should not happen as we take care of all existing types + throw new HiveException("Unsupported authorization type specified"); + } + } + private void throwNotFound(String objType, String objName) throws HiveException { throw new HiveException(objType + " " + objName + " not found"); } - private int roleDDL(RoleDDLDesc roleDDLDesc) { - RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation(); + private int roleDDL(RoleDDLDesc 
roleDDLDesc) throws HiveException, IOException { + if(SessionState.get().isAuthorizationModeV2()){ + return roleDDLV2(roleDDLDesc); + } + DataOutputStream outStream = null; + RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation(); try { if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) { db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName()); @@ -812,6 +921,48 @@ private int roleDDL(RoleDDLDesc roleDDLDesc) { return 0; } + private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException { + HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2(); + RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation(); + //call the appropriate hive authorizer function + switch(operation){ + case CREATE_ROLE: + authorizer.createRole(roleDDLDesc.getName(), null); + break; + case DROP_ROLE: + authorizer.dropRole(roleDDLDesc.getName()); + break; + case SHOW_ROLE_GRANT: + List roles = authorizer.getRoles(new HivePrincipal(roleDDLDesc.getName(), + getHivePrincipalType(roleDDLDesc.getPrincipalType()))); + writeListToFile(roles, roleDDLDesc.getResFile()); + break; + case SHOW_ROLES: + List allRoles = authorizer.getAllRoles(); + writeListToFile(allRoles, roleDDLDesc.getResFile()); + break; + default: + throw new HiveException("Unkown role operation " + + operation.getOperationName()); + } + return 0; + } + + /** + * Write list of string entries into given file + * @param entries + * @param resFile + * @throws IOException + */ + private void writeListToFile(List entries, String resFile) throws IOException { + StringBuilder sb = new StringBuilder(entries.size()*2); + for(String entry : entries){ + sb.append(entry); + sb.append(terminator); + } + writeToFile(sb.toString(), resFile); + } + private int alterDatabase(AlterDatabaseDesc alterDbDesc) throws HiveException { String dbName = alterDbDesc.getDatabaseName(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java 
ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index fccea89..758bfa2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -2014,6 +2014,25 @@ public static String formatBinaryString(byte[] array, int start, int length) { return names; } + /** + * Extract db and table name from dbtable string, where db and table are separated by "." + * If there is no db name part, set the current sessions default db + * @param dbtable + * @return String array with two elements, first is db name, second is table name + * @throws HiveException + */ + public static String[] getDbTableName(String dbtable) throws HiveException{ + String[] names = dbtable.split("\\."); + switch (names.length) { + case 2: + return names; + case 1: + return new String [] {SessionState.get().getCurrentDatabase(), dbtable}; + default: + throw new HiveException(ErrorMsg.INVALID_TABLE_NAME, dbtable); + } + } + public static void validateColumnNames(List colNames, List checkCols) throws SemanticException { Iterator checkColsIter = checkCols.iterator(); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 441f329..080aa5c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2535,20 +2535,8 @@ public boolean deletePartitionColumnStatistics(String dbName, String tableName, } public Table newTable(String tableName) throws HiveException { - String[] names = getQualifiedNames(tableName); - switch (names.length) { - case 2: - return new Table(names[0], names[1]); - case 1: - return new Table(SessionState.get().getCurrentDatabase(), names[0]); - default: - try{ - throw new HiveException("Invalid table name: " + tableName); - }catch(Exception e) { - e.printStackTrace(); - } - throw new HiveException("Invalid table name: " + tableName); - } + String[] names = 
Utilities.getDbTableName(tableName); + return new Table(names[0], names[1]); } public String getDelegationToken(String owner, String renewer) diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java index 143c0a6..c65bf28 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java @@ -29,6 +29,9 @@ import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.DefaultHiveAuthorizerFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.ReflectionUtils; @@ -334,6 +337,23 @@ public static HiveIndexHandler getIndexHandler(HiveConf conf, public static HiveAuthorizationProvider getAuthorizeProviderManager( Configuration conf, HiveConf.ConfVars authorizationProviderConfKey, HiveAuthenticationProvider authenticator) throws HiveException { + return getAuthorizeProviderManager(conf, authorizationProviderConfKey, authenticator, false); + } + + /** + * Create a new instance of HiveAuthorizationProvider + * @param conf + * @param authorizationProviderConfKey + * @param authenticator + * @param nullIfOtherClass - return null if configuration + * does not point to a HiveAuthorizationProvider subclass + * @return new instance of HiveAuthorizationProvider + * @throws HiveException + */ + @SuppressWarnings("unchecked") + public static HiveAuthorizationProvider getAuthorizeProviderManager( + Configuration conf, HiveConf.ConfVars authorizationProviderConfKey, + HiveAuthenticationProvider authenticator, boolean 
nullIfOtherClass) throws HiveException { String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey); @@ -343,8 +363,11 @@ public static HiveAuthorizationProvider getAuthorizeProviderManager( if (clsStr == null || clsStr.trim().equals("")) { cls = DefaultHiveAuthorizationProvider.class; } else { - cls = (Class) Class.forName( - clsStr, true, JavaUtils.getClassLoader()); + Class configClass = Class.forName(clsStr, true, JavaUtils.getClassLoader()); + if(nullIfOtherClass && !HiveAuthorizationProvider.class.isAssignableFrom(configClass) ){ + return null; + } + cls = (Class)configClass; } if (cls != null) { ret = ReflectionUtils.newInstance(cls, conf); @@ -356,6 +379,31 @@ public static HiveAuthorizationProvider getAuthorizeProviderManager( return ret; } + + /** + * Return HiveAuthorizerFactory used by new authorization plugin interface. + * @param conf + * @param authorizationProviderConfKey + * @return new instance of HiveAuthorizerFactory + * @throws HiveException if HiveAuthorizerFactory specified in configuration could not be instantiated + */ + public static HiveAuthorizerFactory getAuthorizerFactory( + Configuration conf, HiveConf.ConfVars authorizationProviderConfKey) + throws HiveException { + + Class cls = conf.getClass(authorizationProviderConfKey.varname, + DefaultHiveAuthorizerFactory.class, HiveAuthorizerFactory.class); + + if(cls == null){ + //should not happen as default value is set + throw new HiveException("Configuration value " + authorizationProviderConfKey.varname + + " is not set to valid HiveAuthorizerFactory subclass" ); + } + + HiveAuthorizerFactory authFactory = ReflectionUtils.newInstance(cls, conf); + return authFactory; + } + @SuppressWarnings("unchecked") public static HiveAuthenticationProvider getAuthenticator( Configuration conf, HiveConf.ConfVars authenticatorConfKey diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 52d7c75..6357f94 100644 ---
ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -181,7 +181,9 @@ import org.apache.hadoop.mapred.InputFormat; /** - * Implementation of the semantic analyzer. + * Implementation of the semantic analyzer. It generates the query plan. + * There are other specific semantic analyzers for some hive operations such as + * DDLSemanticAnalyzer for ddl operations. */ public class SemanticAnalyzer extends BaseSemanticAnalyzer { diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/DefaultHiveAuthorizerFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/DefaultHiveAuthorizerFactory.java new file mode 100644 index 0000000..7470e9d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/DefaultHiveAuthorizerFactory.java @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; +import org.apache.hadoop.hive.conf.HiveConf; + +@Private +public class DefaultHiveAuthorizerFactory implements HiveAuthorizerFactory{ + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, String hiveCurrentUser) { + return null; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java new file mode 100644 index 0000000..8e4114f --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.List; + +/** + * Interface that is invoked by access control commands, including grant/revoke role/privileges, + * create/drop roles, and commands to read the state of authorization rules. 
+ * Methods here have corresponding methods in HiveAuthorizer, check method documentation there. + */ +public interface HiveAccessController { + + void grantPrivileges(List hivePrincipals, List hivePrivileges, + HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption); + + void revokePrivileges(List hivePrincipals, List hivePrivileges, + HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption); + + void createRole(String roleName, HivePrincipal adminGrantor); + + void dropRole(String roleName); + + List getRoles(HivePrincipal hivePrincipal); + + void grantRole(List hivePrincipals, List roles, boolean grantOption, + HivePrincipal grantorPrinc); + + void revokeRole(List hivePrincipals, List roles, boolean grantOption, + HivePrincipal grantorPrinc); + + List getAllRoles(); + +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java new file mode 100644 index 0000000..63046f5 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.List; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; + +/** + * Interface used to check if user has privileges to perform certain action. + * Methods here have corresponding methods in HiveAuthorizer, check method documentation there. + */ +@Public +@Evolving +public interface HiveAuthorizationValidator { + /** + * Check if current user has privileges to perform given operation type hiveOpType on the given + * input and output objects + * @param hiveOpType + * @param inputHObjs + * @param outputHObjs + */ + void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, + List outputHObjs); + +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java new file mode 100644 index 0000000..c10a2ac --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java @@ -0,0 +1,125 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.List; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; +import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; + +/** + * Interface for hive authorization plugins. + * Used by the DDLTasks for access control statement, + * and for checking authorization from Driver.doAuthorization() + * + * This a more generic version of + * {@link HiveAuthorizationProvider} that lets you define the behavior of access control + * statements and does not make assumptions about the privileges needed for a hive operation. + * This is referred to as V2 authorizer in other parts of the code. + */ +@Public +@Evolving +public interface HiveAuthorizer { + + /** + * Grant privileges for principals on the object + * @param hivePrincipals + * @param hivePrivileges + * @param hivePrivObject + * @param grantorPrincipal + * @param grantOption + */ + void grantPrivileges(List hivePrincipals, List hivePrivileges, + HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption); + + /** + * Revoke privileges for principals on the object + * @param hivePrincipals + * @param hivePrivileges + * @param hivePrivObject + * @param grantorPrincipal + * @param grantOption + */ + void revokePrivileges(List hivePrincipals, List hivePrivileges, + HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption); + + /** + * Create role + * @param roleName + * @param adminGrantor - The user in "[ WITH ADMIN ]" clause of "create role" + */ + void createRole(String roleName, HivePrincipal adminGrantor); + + /** + * Drop role + * @param roleName + */ + void dropRole(String roleName); + + /** + * Get roles that this user/role belongs 
to + * @param hivePrincipal - user or role + * @return list of roles + */ + List getRoles(HivePrincipal hivePrincipal); + + /** + * Grant roles in given roles list to principals in given hivePrincipals list + * @param hivePrincipals + * @param roles + * @param grantOption + * @param grantorPrinc + */ + void grantRole(List hivePrincipals, List roles, boolean grantOption, + HivePrincipal grantorPrinc); + + /** + * Revoke roles in given roles list to principals in given hivePrincipals list + * @param hivePrincipals + * @param roles + * @param grantOption + * @param grantorPrinc + */ + void revokeRole(List hivePrincipals, List roles, boolean grantOption, + HivePrincipal grantorPrinc); + + /** + * Check if user has privileges to do this action on these objects + * @param hiveOpType + * @param inputsHObjs + * @param outputHObjs + */ + void checkPrivileges(HiveOperationType hiveOpType, List inputsHObjs, + List outputHObjs); + + /** + * @return all existing roles + */ + List getAllRoles(); + + + //other functions to be added - + //showAllRoles() + //showUsersInRole(rolename) + //showgrants(username) + //isSuperuser(username) + + +} + diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java new file mode 100644 index 0000000..c004105 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; +import org.apache.hadoop.hive.conf.HiveConf; + +/** + * Implementation of this interface specified through hive configuration will be used to + * create {@link HiveAuthorizer} instance used for hive authorization. + * + */ +@Public +@Evolving +public interface HiveAuthorizerFactory { + /** + * Create a new instance of HiveAuthorizer, initialized with the given objects. + * @param metastoreClientFactory - Use this to get the valid meta store client (IMetaStoreClient) + * for the current thread. Each invocation of method in HiveAuthorizer can happen in + * different thread, so get the current instance in each method invocation. 
+ * @param conf - current HiveConf + * @param hiveCurrentUser - user for current session + * @return new instance of HiveAuthorizer + */ + HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, String hiveCurrentUser); +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java new file mode 100644 index 0000000..ca95bfc --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java @@ -0,0 +1,102 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.List; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; + +/** + * Convenience implementation of HiveAuthorizer. + * You can customize the behavior by passing different implementations of + * {@link HiveAccessController} and {@link HiveAuthorizationValidator} to constructor. 
+ * + */ +@Public +@Evolving +public class HiveAuthorizerImpl implements HiveAuthorizer { + HiveAccessController accessController; + HiveAuthorizationValidator authValidator; + + HiveAuthorizerImpl(HiveAccessController accessController, HiveAuthorizationValidator authValidator){ + this.accessController = accessController; + this.authValidator = authValidator; + } + + @Override + public void grantPrivileges(List hivePrincipals, + List hivePrivileges, HivePrivilegeObject hivePrivObject, + HivePrincipal grantorPrincipal, boolean grantOption) { + accessController.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject, + grantorPrincipal, grantOption); + } + + @Override + public void revokePrivileges(List hivePrincipals, + List hivePrivileges, HivePrivilegeObject hivePrivObject, + HivePrincipal grantorPrincipal, boolean grantOption) { + accessController.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivObject, + grantorPrincipal, grantOption); + } + + @Override + public void createRole(String roleName, HivePrincipal adminGrantor) { + accessController.createRole(roleName, adminGrantor); + } + + @Override + public void dropRole(String roleName) { + accessController.dropRole(roleName); + } + + @Override + public List getRoles(HivePrincipal hivePrincipal) { + return accessController.getRoles(hivePrincipal); + } + + @Override + public void grantRole(List hivePrincipals, List roles, + boolean grantOption, HivePrincipal grantorPrinc) { + accessController.grantRole(hivePrincipals, roles, grantOption, grantorPrinc); + } + + @Override + public void revokeRole(List hivePrincipals, List roles, + boolean grantOption, HivePrincipal grantorPrinc) { + accessController.revokeRole(hivePrincipals, roles, grantOption, grantorPrinc); + } + + @Override + public void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs, + List outputHObjs) { + authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs); + } + + @Override + public List getAllRoles() { + return 
accessController.getAllRoles(); + } + + + // other access control functions + +// void validateAuthority(HiveAction, inputs, outputs){ +// authValidator.validateAuthority(HiveAction, inputs, outputs); +// } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java new file mode 100644 index 0000000..4208b2d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java @@ -0,0 +1,30 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.io.IOException; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +/** + * Factory for getting current valid instance of IMetaStoreClient + */ +@Public +public interface HiveMetastoreClientFactory { + IMetaStoreClient getHiveMetastoreClient() throws IOException; +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java new file mode 100644 index 0000000..1fadb3e --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + + +import java.io.IOException; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; +import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.ql.metadata.Hive; +import org.apache.hadoop.hive.ql.metadata.HiveException; +/** + * Private implementation that returns instance of IMetaStoreClient + */ +@Private +public class HiveMetastoreClientFactoryImpl implements HiveMetastoreClientFactory{ + + @Override + public IMetaStoreClient getHiveMetastoreClient() throws IOException { + try { + return Hive.get().getMSC(); + } catch (MetaException e) { + throw new IOException(e); + } catch (HiveException e) { + throw new IOException(e); + } + } + +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java new file mode 100644 index 0000000..e20b183 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java @@ -0,0 +1,110 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; + +/** + * List of hive operations types. + */ +@Public +public enum HiveOperationType { + EXPLAIN, + LOAD, + EXPORT, + IMPORT, + CREATEDATABASE, + DROPDATABASE, + SWITCHDATABASE, + LOCKDB, + UNLOCKDB, + DROPTABLE , + DESCTABLE, + DESCFUNCTION, + MSCK, + ALTERTABLE_ADDCOLS, + ALTERTABLE_REPLACECOLS, + ALTERTABLE_RENAMECOL, + ALTERTABLE_RENAMEPART, + ALTERTABLE_RENAME, + ALTERTABLE_DROPPARTS, + ALTERTABLE_ADDPARTS, + ALTERTABLE_TOUCH, + ALTERTABLE_ARCHIVE, + ALTERTABLE_UNARCHIVE, + ALTERTABLE_PROPERTIES, + ALTERTABLE_SERIALIZER, + ALTERPARTITION_SERIALIZER, + ALTERTABLE_SERDEPROPERTIES, + ALTERPARTITION_SERDEPROPERTIES, + ALTERTABLE_CLUSTER_SORT, + ANALYZE_TABLE, + ALTERTABLE_BUCKETNUM, + ALTERPARTITION_BUCKETNUM, + SHOWDATABASES, + SHOWTABLES, + SHOWCOLUMNS, + SHOW_TABLESTATUS, + SHOW_TBLPROPERTIES, + SHOW_CREATETABLE, + SHOWFUNCTIONS, + SHOWINDEXES, + SHOWPARTITIONS, + SHOWLOCKS, + CREATEFUNCTION, + DROPFUNCTION, + CREATEMACRO, + DROPMACRO, + CREATEVIEW, + DROPVIEW, + CREATEINDEX, + DROPINDEX, + ALTERINDEX_REBUILD, + ALTERVIEW_PROPERTIES, + DROPVIEW_PROPERTIES, + LOCKTABLE, + UNLOCKTABLE, + CREATEROLE, + DROPROLE, + GRANT_PRIVILEGE, + REVOKE_PRIVILEGE, + SHOW_GRANT, + GRANT_ROLE, + REVOKE_ROLE, + SHOW_ROLES, + SHOW_ROLE_GRANT, + ALTERTABLE_PROTECTMODE, + ALTERPARTITION_PROTECTMODE, + ALTERTABLE_FILEFORMAT, + ALTERPARTITION_FILEFORMAT, + ALTERTABLE_LOCATION, + ALTERPARTITION_LOCATION, + CREATETABLE, + TRUNCATETABLE, + CREATETABLE_AS_SELECT, + QUERY, + ALTERINDEX_PROPS, + ALTERDATABASE, + DESCDATABASE, + ALTERTABLE_MERGEFILES, + ALTERPARTITION_MERGEFILES, + ALTERTABLE_SKEWED, + ALTERTBLPART_SKEWED_LOCATION, + ALTERVIEW_RENAME, + +} diff --git 
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java new file mode 100644 index 0000000..42e9f23 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java @@ -0,0 +1,43 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +/** + * Represents the user or role in grant/revoke statements + */ +public class HivePrincipal { + + public enum HivePrincipalType{ + USER, ROLE, UNKNOWN + } + + private final String name; + private final HivePrincipalType type; + + public HivePrincipal(String name, HivePrincipalType type){ + this.name = name; + this.type = type; + } + public String getName() { + return name; + } + public HivePrincipalType getType() { + return type; + } + +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java new file mode 100644 index 0000000..4b9d133 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java @@ -0,0 +1,42 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.List; + +/** + * Represents the hive privilege being granted/revoked + */ +public class HivePrivilege { + private final String name; + private final List columns; + + public HivePrivilege(String name, List columns){ + this.name = name; + this.columns = columns; + } + + public String getName() { + return name; + } + + public List getColumns() { + return columns; + } + +} diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java new file mode 100644 index 0000000..5b101c2 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java @@ -0,0 +1,52 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public; +import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable; + +/** + * Represents the object on which privilege is being granted/revoked + */ +@Public +@Unstable +public class HivePrivilegeObject { + + public enum HivePrivilegeObjectType { DATABASE, TABLE, VIEW, PARTITION, URI}; + private final HivePrivilegeObjectType type; + private final String dbname; + private final String tableviewname; + + public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableviewname){ + this.type = type; + this.dbname = dbname; + this.tableviewname = tableviewname; + } + + public HivePrivilegeObjectType getType() { + return type; + } + + public String getDbname() { + return dbname; + } + + public String getTableviewname() { + return tableviewname; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index ef35f1a..7fa49bd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -54,7 +54,11 @@ import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl; import org.apache.hadoop.hive.ql.util.DosToUnix; import org.apache.hadoop.hive.shims.ShimLoader; import 
org.apache.hadoop.util.ReflectionUtils; @@ -118,6 +122,10 @@ private HiveAuthorizationProvider authorizer; + private HiveAuthorizer authorizerV2; + + public enum AuthorizationMode{V1, V2}; + private HiveAuthenticationProvider authenticator; private CreateTableAutomaticGrant createTableGrants; @@ -297,15 +305,33 @@ public static SessionState start(SessionState startSs) { // that would cause ClassNoFoundException otherwise throw new RuntimeException(e); } + setupAuth(startSs); + return startSs; + } + /** + * Setup authentication and authorization plugins for this session. + * @param startSs + */ + private static void setupAuth(SessionState startSs) { try { startSs.authenticator = HiveUtils.getAuthenticator( startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER); startSs.authorizer = HiveUtils.getAuthorizeProviderManager( startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, - startSs.authenticator); - startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs - .getConf()); + startSs.authenticator, true); + + if(startSs.authorizer == null){ + //if it was null, the new authorization plugin must be specified in config + HiveAuthorizerFactory authorizerFactory = + HiveUtils.getAuthorizerFactory(startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); + startSs.authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), + startSs.getConf(), startSs.authenticator.getUserName()); + } + else{ + startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs + .getConf()); + } } catch (HiveException e) { throw new RuntimeException(e); } @@ -323,8 +349,7 @@ public static SessionState start(SessionState startSs) { } else { LOG.info("No Tez session required at this point. 
hive.execution.engine=mr."); } - - return startSs; + return; } /** @@ -749,6 +774,10 @@ public void setAuthorizer(HiveAuthorizationProvider authorizer) { this.authorizer = authorizer; } + public HiveAuthorizer getAuthorizerV2() { + return authorizerV2; + } + public HiveAuthenticationProvider getAuthenticator() { return authenticator; } @@ -842,6 +871,20 @@ public void close() throws IOException { } } + public AuthorizationMode getAuthorizationMode(){ + if(authorizer != null){ + return AuthorizationMode.V1; + }else if(authorizerV2 != null){ + return AuthorizationMode.V2; + } + //should not happen - this should not get called before this.start() is called + throw new AssertionError("Authorization plugins not initialized!"); + } + + public boolean isAuthorizationModeV2(){ + return getAuthorizationMode() == AuthorizationMode.V2; + } + /** * @param resetPerfLogger * @return Tries to return an instance of the class whose name is configured in diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index 4f31f75..91f4d3f 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -26,10 +26,13 @@ import junit.framework.TestCase; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.mapred.JobConf; @@ -74,4 +77,33 @@ public void testSerializeTimestamp() { assertEquals(desc.getExprString(), 
Utilities.deserializeExpression( Utilities.serializeExpression(desc)).getExprString()); } + + public void testgetDbTableName() throws HiveException{ + String tablename; + String [] dbtab; + SessionState.start(new HiveConf(this.getClass())); + String curDefaultdb = SessionState.get().getCurrentDatabase(); + + //test table without db portion + tablename = "tab1"; + dbtab = Utilities.getDbTableName(tablename); + assertEquals("db name", curDefaultdb, dbtab[0]); + assertEquals("table name", tablename, dbtab[1]); + + //test table with db portion + tablename = "dab1.tab1"; + dbtab = Utilities.getDbTableName(tablename); + assertEquals("db name", "dab1", dbtab[0]); + assertEquals("table name", "tab1", dbtab[1]); + + //test invalid table name + tablename = "dab1.tab1.x1"; + try { + dbtab = Utilities.getDbTableName(tablename); + fail("exception was expected for invalid table name"); + } catch(HiveException ex){ + assertEquals("Invalid table name " + tablename, ex.getMessage()); + } + } + } diff --git ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java new file mode 100644 index 0000000..b990cb2 --- /dev/null +++ ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import static org.junit.Assert.*; + +import org.apache.hadoop.hive.ql.plan.HiveOperation; +import org.junit.Test; + +/** + * Test HiveOperationType + */ +public class TestHiveOperationType { + + /** + * test that all enums in {@link HiveOperation} match one in @{link HiveOperationType} + */ + @Test + public void checkHiveOperationTypeMatch(){ + for (HiveOperation op : HiveOperation.values()) { + try { + HiveOperationType.valueOf(op.name()); + } catch(IllegalArgumentException ex) { + // if value is null or not found, exception would get thrown + fail("Unable to find corresponding type in HiveOperationType for " + op + " : " + ex ); + } + } + assertEquals("Check if HiveOperation, HiveOperationType have same number of instances", + HiveOperation.values().length, HiveOperationType.values().length); + } + +}