diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 72c04d3..bd95161 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -55,6 +55,7 @@
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.Hook;
import org.apache.hadoop.hive.ql.hooks.HookContext;
@@ -101,6 +102,9 @@
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde2.ByteStream;
@@ -523,6 +527,11 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
SessionState ss = SessionState.get();
HiveOperation op = ss.getHiveOperation();
Hive db = sem.getDb();
+ if(ss.isAuthorizationModeV2()){
+ doAuthorizationV2(ss, op, inputs, outputs);
+ return;
+ }
+
if (op != null) {
if (op.equals(HiveOperation.CREATEDATABASE)) {
ss.getAuthorizer().authorize(
@@ -543,6 +552,7 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
}
}
if (outputs != null && outputs.size() > 0) {
+ //do authorization for each output
for (WriteEntity write : outputs) {
if (write.getType() == Entity.Type.DATABASE) {
ss.getAuthorizer().authorize(write.getDatabase(),
@@ -570,10 +580,10 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
}
if (inputs != null && inputs.size() > 0) {
-
Map
> tab2Cols = new HashMap>();
Map> part2Cols = new HashMap>();
+ //determine if partition level privileges should be checked for input tables
Map tableUsePartLevelAuth = new HashMap();
for (ReadEntity read : inputs) {
if (read.getType() == Entity.Type.DATABASE) {
@@ -596,6 +606,8 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
}
}
+ //for a select or create-as-select query, populate the partition to column (par2Cols) or
+ // table to columns mapping (tab2Cols)
if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
|| op.equals(HiveOperation.QUERY)) {
SemanticAnalyzer querySem = (SemanticAnalyzer) sem;
@@ -691,6 +703,49 @@ private void doAuthorization(BaseSemanticAnalyzer sem)
}
}
+ private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet inputs,
+ HashSet outputs) {
+ HiveOperationType hiveOpType = getHiveOperationType(op);
+ List inputsHObjs = getHivePrivObjects(inputs);
+ List outputHObjs = getHivePrivObjects(outputs);
+ ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs);
+ return;
+ }
+
+ private List getHivePrivObjects(HashSet extends Entity> inputs) {
+ List hivePrivobjs = new ArrayList();
+ for(Entity input : inputs){
+ HivePrivilegeObjectType privObjType = getHivePrivilegeObjectType(input.getType());
+ //support for authorization on partitions or uri needs to be added
+ HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType,
+ input.getDatabase().getName(),
+ input.getTable().getTableName());
+ hivePrivobjs.add(hPrivObject);
+ }
+ return hivePrivobjs;
+ }
+
+ private HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) {
+ switch(type){
+ case DATABASE:
+ return HivePrivilegeObjectType.DATABASE;
+ case TABLE:
+ return HivePrivilegeObjectType.TABLE;
+ case LOCAL_DIR:
+ case DFS_DIR:
+ return HivePrivilegeObjectType.URI;
+ case PARTITION:
+ case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions
+ return HivePrivilegeObjectType.PARTITION;
+ default:
+ return null;
+ }
+ }
+
+ private HiveOperationType getHiveOperationType(HiveOperation op) {
+ return HiveOperationType.valueOf(op.name());
+ }
+
/**
* @return The current query plan associated with this Driver, if any.
*/
diff --git ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index b36a4ca..13422d3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -45,7 +45,7 @@
// 30000 to 39999: Runtime errors which Hive thinks may be transient and retrying may succeed.
// 40000 to 49999: Errors where Hive is unable to advise about retries.
// In addition to the error code, ErrorMsg also has a SQLState field.
- // SQLStates are taken from Section 12.5 of ISO-9075.
+ // SQLStates are taken from Section 22.1 of ISO-9075.
// See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt
// Most will just rollup to the generic syntax error state of 42000, but
// specific errors can override the that state.
@@ -53,6 +53,7 @@
// http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html
GENERIC_ERROR(40000, "Exception while processing"),
+ //========================== 10000 range starts here ========================//
INVALID_TABLE(10001, "Table not found", "42S02"),
INVALID_COLUMN(10002, "Invalid column reference"),
INVALID_INDEX(10003, "Invalid index"),
@@ -370,7 +371,10 @@
INVALID_DIR(10252, "{0} is not a directory", true),
NO_VALID_LOCATIONS(10253, "Could not find any valid location to place the jars. " +
"Please update hive.jar.directory or hive.user.install.directory with a valid location", false),
+ UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254,
+ "Principal type GROUP is not supported in this authorization setting", "28000"),
+ //========================== 20000 range starts here ========================//
SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."),
SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. "
+ "It may have crashed with an error."),
@@ -382,6 +386,7 @@
"tried to create too many dynamic partitions. The maximum number of dynamic partitions " +
"is controlled by hive.exec.max.dynamic.partitions and hive.exec.max.dynamic.partitions.pernode. "),
+ //========================== 30000 range starts here ========================//
STATSPUBLISHER_NOT_OBTAINED(30000, "StatsPublisher cannot be obtained. " +
"There was a error to retrieve the StatsPublisher, and retrying " +
"might help. If you dont want the query to fail because accurate statistics " +
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index dc45ea2..09f63a4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -157,6 +157,12 @@
import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -399,7 +405,8 @@ public int execute(DriverContext driverContext) {
GrantDesc grantDesc = work.getGrantDesc();
if (grantDesc != null) {
return grantOrRevokePrivileges(grantDesc.getPrincipals(), grantDesc
- .getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), grantDesc.getGrantor(), grantDesc.getGrantorType(), grantDesc.isGrantOption(), true);
+ .getPrivileges(), grantDesc.getPrivilegeSubjectDesc(), grantDesc.getGrantor(),
+ grantDesc.getGrantorType(), grantDesc.isGrantOption(), true);
}
RevokeDesc revokeDesc = work.getRevokeDesc();
@@ -489,6 +496,11 @@ private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
boolean grantRole = grantOrRevokeRoleDDL.getGrant();
List principals = grantOrRevokeRoleDDL.getPrincipalDesc();
List roles = grantOrRevokeRoleDDL.getRoles();
+
+ if(SessionState.get().isAuthorizationModeV2()){
+ return grantOrRevokeRoleV2(grantOrRevokeRoleDDL);
+ }
+
for (PrincipalDesc principal : principals) {
String userName = principal.getName();
for (String roleName : roles) {
@@ -507,6 +519,28 @@ private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
return 0;
}
+ private int grantOrRevokeRoleV2(GrantRevokeRoleDDL grantOrRevokeRoleDDL) throws HiveException {
+ HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+ //convert to the types needed for plugin api
+ HivePrincipal grantorPrinc = null;
+ if(grantOrRevokeRoleDDL.getGrantor() != null){
+ grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
+ getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
+ }
+ List hivePrincipals = getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+ List roles = grantOrRevokeRoleDDL.getRoles();
+
+ if(grantOrRevokeRoleDDL.getGrant()){
+ authorizer.grantRole(hivePrincipals, roles,
+ grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
+ }
+ else{
+ authorizer.revokeRole(hivePrincipals, roles,
+ grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
+ }
+ return 0;
+ }
+
private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
StringBuilder builder = new StringBuilder();
try {
@@ -514,6 +548,7 @@ private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
String principalName = principalDesc.getName();
if (hiveObjectDesc == null) {
+ //show all privileges for this user
List users = db.showPrivilegeGrant(
HiveObjectType.GLOBAL, principalName, principalDesc.getType(),
null, null, null, null);
@@ -619,7 +654,14 @@ public int compare(HiveObjectPrivilege one, HiveObjectPrivilege other) {
private int grantOrRevokePrivileges(List principals,
List privileges, PrivilegeObjectDesc privSubjectDesc,
- String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant) {
+ String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant)
+ throws HiveException {
+
+ if(SessionState.get().isAuthorizationModeV2()){
+ return grantOrRevokePrivilegesV2(principals, privileges, privSubjectDesc, grantor,
+ grantorType, grantOption, isGrant);
+ }
+
if (privileges == null || privileges.size() == 0) {
console.printError("No privilege found.");
return 1;
@@ -638,15 +680,12 @@ private int grantOrRevokePrivileges(List principals,
}
String obj = privSubjectDesc.getObject();
+ //get the db, table objects
if (privSubjectDesc.getTable()) {
- String[] dbTab = obj.split("\\.");
- if (dbTab.length == 2) {
- dbName = dbTab[0];
- tableName = dbTab[1];
- } else {
- dbName = SessionState.get().getCurrentDatabase();
- tableName = obj;
- }
+ String[] dbTable = getDbTableName(obj);
+ dbName = dbTable[0];
+ tableName = dbTable[1];
+
dbObj = db.getDatabase(dbName);
if (dbObj == null) {
throwNotFound("Database", dbName);
@@ -757,13 +796,97 @@ private int grantOrRevokePrivileges(List principals,
return 0;
}
+ private int grantOrRevokePrivilegesV2(List principals,
+ List privileges, PrivilegeObjectDesc privSubjectDesc, String grantor,
+ PrincipalType grantorType, boolean grantOption, boolean isGrant) throws HiveException {
+ HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+
+ //Convert to object types used by the authorization plugin interface
+ List hivePrincipals = getHivePrincipals(principals);
+ List hivePrivileges = getHivePrivileges(privileges);
+ HivePrivilegeObject hivePrivObject = getHivePrivilegeObject(privSubjectDesc);
+ HivePrincipal grantorPrincipal = new HivePrincipal(grantor, getHivePrincipalType(grantorType));
+
+ if(isGrant){
+ authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+ grantorPrincipal, grantOption);
+ }else {
+ authorizer.revokePrivileges(hivePrincipals, hivePrivileges,
+ hivePrivObject, grantorPrincipal, grantOption);
+ }
+ //no exception thrown, so looks good
+ return 0;
+ }
+
+ /**
+   * @param obj table name that might contain db name (eg "db1.table1")
+ * @return string array with db name, table name
+ */
+ private String[] getDbTableName(String obj) {
+ String[] dbTab = obj.split("\\.");
+ if (dbTab.length != 2) {
+ //no db part in tablename, use default db for current session
+ dbTab = new String [2];
+ dbTab[0] = SessionState.get().getCurrentDatabase();
+ dbTab[1] = obj;
+ }
+ return dbTab;
+ }
+
+ private HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc) {
+ String [] dbTable = getDbTableName(privSubjectDesc.getObject());
+ return new HivePrivilegeObject(getPrivObjectType(privSubjectDesc), dbTable[0], dbTable[1]);
+ }
+
+ private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
+ //TODO: This needs to change to support view once view grant/revoke is supported as
+ // part of HIVE-6181
+ return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE : HivePrivilegeObjectType.DATABASE;
+ }
+
+ private List getHivePrivileges(List privileges) {
+ List hivePrivileges = new ArrayList();
+ for(PrivilegeDesc privilege : privileges){
+ hivePrivileges.add(
+ new HivePrivilege(privilege.getPrivilege().toString(), privilege.getColumns()));
+ }
+ return hivePrivileges;
+ }
+
+ private List getHivePrincipals(List principals) throws HiveException {
+ ArrayList hivePrincipals = new ArrayList();
+ for(PrincipalDesc principal : principals){
+ hivePrincipals.add(
+ new HivePrincipal(principal.getName(), getHivePrincipalType(principal.getType())));
+ }
+ return hivePrincipals;
+ }
+
+ private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
+ switch(type){
+ case USER:
+ return HivePrincipalType.USER;
+ case ROLE:
+ return HivePrincipalType.ROLE;
+ case GROUP:
+ throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+ default:
+ //should not happen as we take care of all existing types
+ throw new HiveException("Unsupported authorization type specified");
+ }
+ }
+
private void throwNotFound(String objType, String objName) throws HiveException {
throw new HiveException(objType + " " + objName + " not found");
}
- private int roleDDL(RoleDDLDesc roleDDLDesc) {
- RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
+ private int roleDDL(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
+ if(SessionState.get().isAuthorizationModeV2()){
+ return roleDDLV2(roleDDLDesc);
+ }
+
DataOutput outStream = null;
+ RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
try {
if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) {
db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName());
@@ -802,6 +925,44 @@ private int roleDDL(RoleDDLDesc roleDDLDesc) {
return 0;
}
+ private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
+ HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+ RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
+ //call the appropriate hive authorizer function
+ switch(operation){
+ case CREATE_ROLE:
+ authorizer.createRole(roleDDLDesc.getName(), null);
+ break;
+ case DROP_ROLE:
+ authorizer.dropRole(roleDDLDesc.getName());
+ break;
+ case SHOW_ROLE_GRANT:
+ List roles = authorizer.getRoles(new HivePrincipal(roleDDLDesc.getName(),
+ getHivePrincipalType(roleDDLDesc.getPrincipalType())));
+ writeListToFile(roles, roleDDLDesc.getResFile());
+ break;
+ default:
+ throw new HiveException("Unkown role operation "
+ + operation.getOperationName());
+ }
+ return 0;
+ }
+
+ /**
+ * Write list of string entries into given file
+ * @param entries
+ * @param resFile
+ * @throws IOException
+ */
+ private void writeListToFile(List entries, String resFile) throws IOException {
+ StringBuilder sb = new StringBuilder(entries.size()*2);
+ for(String entry : entries){
+ sb.append(entry);
+ sb.append(terminator);
+ }
+ writeToFile(sb.toString(), resFile);
+ }
+
private int alterDatabase(AlterDatabaseDesc alterDbDesc) throws HiveException {
String dbName = alterDbDesc.getDatabaseName();
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index 143c0a6..30b6374 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -29,6 +29,9 @@
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.DefaultHiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.ReflectionUtils;
@@ -334,6 +337,23 @@ public static HiveIndexHandler getIndexHandler(HiveConf conf,
public static HiveAuthorizationProvider getAuthorizeProviderManager(
Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
HiveAuthenticationProvider authenticator) throws HiveException {
+ return getAuthorizeProviderManager(conf, authorizationProviderConfKey, authenticator, false);
+ }
+
+ /**
+ * Create a new instance of HiveAuthorizationProvider
+ * @param conf
+ * @param authorizationProviderConfKey
+ * @param authenticator
+ * @param nullIfOtherClass - return null if configuration
+ * does not point to a HiveAuthorizationProvider subclass
+ * @return new instance of HiveAuthorizationProvider
+ * @throws HiveException
+ */
+ @SuppressWarnings("unchecked")
+ public static HiveAuthorizationProvider getAuthorizeProviderManager(
+ Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
+ HiveAuthenticationProvider authenticator, boolean nullIfOtherClass) throws HiveException {
String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey);
@@ -343,8 +363,11 @@ public static HiveAuthorizationProvider getAuthorizeProviderManager(
if (clsStr == null || clsStr.trim().equals("")) {
cls = DefaultHiveAuthorizationProvider.class;
} else {
- cls = (Class extends HiveAuthorizationProvider>) Class.forName(
- clsStr, true, JavaUtils.getClassLoader());
+ Class> configClass = Class.forName(clsStr, true, JavaUtils.getClassLoader());
+ if(nullIfOtherClass && !HiveAuthorizationProvider.class.isAssignableFrom(configClass) ){
+ return null;
+ }
+ cls = (Class extends HiveAuthorizationProvider>)configClass;
}
if (cls != null) {
ret = ReflectionUtils.newInstance(cls, conf);
@@ -356,6 +379,31 @@ public static HiveAuthorizationProvider getAuthorizeProviderManager(
return ret;
}
+
+ /**
+ * Return HiveAuthorizerFactory used by new authorization plugin interface.
+ * @param conf
+ * @param authorizationProviderConfKey
+   * @return HiveAuthorizerFactory instance to be used by the new authorization plugin interface
+   * @throws HiveException if HiveAuthorizerFactory specified in configuration could not be instantiated
+ */
+ public static HiveAuthorizerFactory getAuthorizerFactory(
+ Configuration conf, HiveConf.ConfVars authorizationProviderConfKey)
+ throws HiveException {
+
+ Class extends HiveAuthorizerFactory> cls = conf.getClass(authorizationProviderConfKey.varname,
+ DefaultHiveAuthorizerFactory.class, HiveAuthorizerFactory.class);
+
+ if(cls == null){
+ //should not happen as default value is set
+ throw new HiveException("Configuration value " + authorizationProviderConfKey.varname
+ + " is not set to valid HiveAuthorizerFactory subclass" );
+ }
+
+ HiveAuthorizerFactory authFactory = ReflectionUtils.newInstance(cls, conf);
+ return authFactory;
+ }
+
@SuppressWarnings("unchecked")
public static HiveAuthenticationProvider getAuthenticator(
Configuration conf, HiveConf.ConfVars authenticatorConfKey
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 52d7c75..6357f94 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -181,7 +181,9 @@
import org.apache.hadoop.mapred.InputFormat;
/**
- * Implementation of the semantic analyzer.
+ * Implementation of the semantic analyzer. It generates the query plan.
+ * There are other specific semantic analyzers for some hive operations such as
+ * DDLSemanticAnalyzer for ddl operations.
*/
public class SemanticAnalyzer extends BaseSemanticAnalyzer {
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/DefaultHiveAuthorizerFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/DefaultHiveAuthorizerFactory.java
new file mode 100644
index 0000000..8a8f781
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/DefaultHiveAuthorizerFactory.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+@Private
+public class DefaultHiveAuthorizerFactory implements HiveAuthorizerFactory{
+ @Override
+ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, String hiveCurrentUser) {
+    // return new HiveAuthorizerImpl(new DefaultHiveAccessController(db, conf), new DefaultHiveAuthValidator(db, conf));
+ return null;
+ }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java
new file mode 100644
index 0000000..0c33a82
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessController.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.util.List;
+
+/**
+ * Interface that is invoked by access control commands, including grant/revoke role/privileges,
+ * create/drop roles, and commands to read the state of authorization rules.
+ * Methods here have corresponding methods in HiveAuthorizer, check method documentation there.
+ */
+public interface HiveAccessController {
+
+ void grantPrivileges(List hivePrincipals, List hivePrivileges,
+ HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption);
+
+ void revokePrivileges(List hivePrincipals, List hivePrivileges,
+ HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption);
+
+ void createRole(String roleName, HivePrincipal adminGrantor);
+
+ void dropRole(String roleName);
+
+ List getRoles(HivePrincipal hivePrincipal);
+
+ void grantRole(List hivePrincipals, List roles, boolean grantOption,
+ HivePrincipal grantorPrinc);
+
+ void revokeRole(List hivePrincipals, List roles, boolean grantOption,
+ HivePrincipal grantorPrinc);
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
new file mode 100644
index 0000000..63046f5
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+
+/**
+ * Interface used to check if user has privileges to perform certain action.
+ * Methods here have corresponding methods in HiveAuthorizer, check method documentation there.
+ */
+@Public
+@Evolving
+public interface HiveAuthorizationValidator {
+ /**
+ * Check if current user has privileges to perform given operation type hiveOpType on the given
+ * input and output objects
+ * @param hiveOpType
+ * @param inputHObjs
+ * @param outputHObjs
+ */
+ void checkPrivileges(HiveOperationType hiveOpType, List inputHObjs,
+ List outputHObjs);
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
new file mode 100644
index 0000000..266a772
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+
+/**
+ * Interface for hive authorization plugins.
+ * Used by the DDLTasks for access control statement,
+ * and for checking authorization from Driver.doAuthorization()
+ */
+@Public
+@Evolving
+public interface HiveAuthorizer {
+
+ /**
+ * Grant privileges for principals on the object
+ * @param hivePrincipals
+ * @param hivePrivileges
+ * @param hivePrivObject
+ * @param grantorPrincipal
+ * @param grantOption
+ */
+ void grantPrivileges(List hivePrincipals, List hivePrivileges,
+ HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption);
+
+ /**
+ * Revoke privileges for principals on the object
+ * @param hivePrincipals
+ * @param hivePrivileges
+ * @param hivePrivObject
+ * @param grantorPrincipal
+ * @param grantOption
+ */
+ void revokePrivileges(List hivePrincipals, List hivePrivileges,
+ HivePrivilegeObject hivePrivObject, HivePrincipal grantorPrincipal, boolean grantOption);
+
+ /**
+ * Create role
+ * @param roleName
+ * @param adminGrantor - The user in "[ WITH ADMIN ]" clause of "create role"
+ */
+ void createRole(String roleName, HivePrincipal adminGrantor);
+
+ /**
+ * Drop role
+ * @param roleName
+ */
+ void dropRole(String roleName);
+
+ /**
+ * Get roles that this user/role belongs to
+ * @param hivePrincipal - user or role
+ * @return list of roles
+ */
+ List getRoles(HivePrincipal hivePrincipal);
+
+ /**
+ * Grant roles in given roles list to principals in given hivePrincipals list
+ * @param hivePrincipals
+ * @param roles
+ * @param grantOption
+ * @param grantorPrinc
+ */
+ void grantRole(List hivePrincipals, List roles, boolean grantOption,
+ HivePrincipal grantorPrinc);
+
+ /**
+ * Revoke roles in given roles list to principals in given hivePrincipals list
+ * @param hivePrincipals
+ * @param roles
+ * @param grantOption
+ * @param grantorPrinc
+ */
+ void revokeRole(List hivePrincipals, List roles, boolean grantOption,
+ HivePrincipal grantorPrinc);
+
+ /**
+ * Check if user has privileges to do this action on these objects
+ * @param hiveOpType
+ * @param inputsHObjs
+ * @param outputHObjs
+ */
+ void checkPrivileges(HiveOperationType hiveOpType, List inputsHObjs,
+ List outputHObjs);
+
+
+ //other functions to be added -
+ //showAllRoles()
+ //showUsersInRole(rolename)
+ //showgrants(username)
+ //isSuperuser(username)
+
+
+}
+
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java
new file mode 100644
index 0000000..c004105
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerFactory.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+/**
+ * Implementation of this interface specified through hive configuration will be used to
+ * create {@link HiveAuthorizer} instance used for hive authorization.
+ *
+ */
+@Public
+@Evolving
+public interface HiveAuthorizerFactory {
+ /**
+ * Create a new instance of HiveAuthorizer, initialized with the given objects.
+ * @param metastoreClientFactory - Use this to get the valid meta store client (IMetaStoreClient)
+ * for the current thread. Each invocation of method in HiveAuthorizer can happen in
+ * different thread, so get the current instance in each method invocation.
+ * @param conf - current HiveConf
+ * @param hiveCurrentUser - user for current session
+ * @return new instance of HiveAuthorizer
+ */
+ HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, String hiveCurrentUser);
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
new file mode 100644
index 0000000..244a9cc
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+
+/**
+ * Convenience implementation of HiveAuthorizer.
+ * You can customize the behavior by passing different implementations of
+ * {@link HiveAccessController} and {@link HiveAuthorizationValidator} to constructor.
+ *
+ */
+@Public
+@Evolving
+public class HiveAuthorizerImpl implements HiveAuthorizer {
+ private final HiveAccessController accessController;
+ private final HiveAuthorizationValidator authValidator;
+
+ public HiveAuthorizerImpl(HiveAccessController accessController, HiveAuthorizationValidator authValidator){
+ this.accessController = accessController;
+ this.authValidator = authValidator;
+ }
+
+ @Override
+ public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) {
+ accessController.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+ grantorPrincipal, grantOption);
+ }
+
+ @Override
+ public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) {
+ accessController.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+ grantorPrincipal, grantOption);
+ }
+
+ @Override
+ public void createRole(String roleName, HivePrincipal adminGrantor) {
+ accessController.createRole(roleName, adminGrantor);
+ }
+
+ @Override
+ public void dropRole(String roleName) {
+ accessController.dropRole(roleName);
+ }
+
+ @Override
+ public List<String> getRoles(HivePrincipal hivePrincipal) {
+ return accessController.getRoles(hivePrincipal);
+ }
+
+ @Override
+ public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) {
+ accessController.grantRole(hivePrincipals, roles, grantOption, grantorPrinc);
+ }
+
+ @Override
+ public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) {
+ accessController.revokeRole(hivePrincipals, roles, grantOption, grantorPrinc);
+ }
+
+ @Override
+ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+ List<HivePrivilegeObject> outputHObjs) {
+ authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs);
+ }
+
+
+ // other access control functions
+
+// void validateAuthority(HiveAction, inputs, outputs){
+// authValidator.validateAuthority(HiveAction, inputs, outputs);
+// }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java
new file mode 100644
index 0000000..4208b2d
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactory.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+/**
+ * Factory for getting current valid instance of IMetaStoreClient
+ */
+@Public
+public interface HiveMetastoreClientFactory {
+ IMetaStoreClient getHiveMetastoreClient() throws IOException;
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java
new file mode 100644
index 0000000..dec7c10
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveMetastoreClientFactoryImpl.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+/**
+ * Private implementation that returns an instance of IMetaStoreClient
+ */
+@Private
+public class HiveMetastoreClientFactoryImpl implements HiveMetastoreClientFactory{
+
+ @Override
+ public IMetaStoreClient getHiveMetastoreClient() throws IOException {
+ try {
+ return Hive.get().getMSC();
+ } catch (Exception e) {
+ throw new IOException(e);
+ }
+ }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
new file mode 100644
index 0000000..183bc97
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+
+/**
+ * List of hive operations types.
+ */
+@Public
+public enum HiveOperationType {
+ EXPLAIN,
+ LOAD,
+ EXPORT,
+ IMPORT,
+ CREATEDATABASE,
+ DROPDATABASE,
+ SWITCHDATABASE,
+ LOCKDB,
+ UNLOCKDB,
+ DROPTABLE,
+ DESCTABLE,
+ DESCFUNCTION,
+ MSCK,
+ ALTERTABLE_ADDCOLS,
+ ALTERTABLE_REPLACECOLS,
+ ALTERTABLE_RENAMECOL,
+ ALTERTABLE_RENAMEPART,
+ ALTERTABLE_RENAME,
+ ALTERTABLE_DROPPARTS,
+ ALTERTABLE_ADDPARTS,
+ ALTERTABLE_TOUCH,
+ ALTERTABLE_ARCHIVE,
+ ALTERTABLE_UNARCHIVE,
+ ALTERTABLE_PROPERTIES,
+ ALTERTABLE_SERIALIZER,
+ ALTERPARTITION_SERIALIZER,
+ ALTERTABLE_SERDEPROPERTIES,
+ ALTERPARTITION_SERDEPROPERTIES,
+ ALTERTABLE_CLUSTER_SORT,
+ ANALYZE_TABLE,
+ ALTERTABLE_BUCKETNUM,
+ ALTERPARTITION_BUCKETNUM,
+ SHOWDATABASES,
+ SHOWTABLES,
+ SHOWCOLUMNS,
+ SHOW_TABLESTATUS,
+ SHOW_TBLPROPERTIES,
+ SHOW_CREATETABLE,
+ SHOWFUNCTIONS,
+ SHOWINDEXES,
+ SHOWPARTITIONS,
+ SHOWLOCKS,
+ CREATEFUNCTION,
+ DROPFUNCTION,
+ CREATEMACRO,
+ DROPMACRO,
+ CREATEVIEW,
+ DROPVIEW,
+ CREATEINDEX,
+ DROPINDEX,
+ ALTERINDEX_REBUILD,
+ ALTERVIEW_PROPERTIES,
+ DROPVIEW_PROPERTIES,
+ LOCKTABLE,
+ UNLOCKTABLE,
+ CREATEROLE,
+ DROPROLE,
+ GRANT_PRIVILEGE,
+ REVOKE_PRIVILEGE,
+ SHOW_GRANT,
+ GRANT_ROLE,
+ REVOKE_ROLE,
+ SHOW_ROLE_GRANT,
+ ALTERTABLE_PROTECTMODE,
+ ALTERPARTITION_PROTECTMODE,
+ ALTERTABLE_FILEFORMAT,
+ ALTERPARTITION_FILEFORMAT,
+ ALTERTABLE_LOCATION,
+ ALTERPARTITION_LOCATION,
+ CREATETABLE,
+ TRUNCATETABLE,
+ CREATETABLE_AS_SELECT,
+ QUERY,
+ ALTERINDEX_PROPS,
+ ALTERDATABASE,
+ DESCDATABASE,
+ ALTERTABLE_MERGEFILES,
+ ALTERPARTITION_MERGEFILES,
+ ALTERTABLE_SKEWED,
+ ALTERTBLPART_SKEWED_LOCATION,
+ ALTERVIEW_RENAME,
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
new file mode 100644
index 0000000..42e9f23
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+/**
+ * Represents the user or role in grant/revoke statements
+ */
+public class HivePrincipal {
+
+ public enum HivePrincipalType{
+ USER, ROLE, UNKNOWN
+ }
+
+ private final String name;
+ private final HivePrincipalType type;
+
+ public HivePrincipal(String name, HivePrincipalType type){
+ this.name = name;
+ this.type = type;
+ }
+ public String getName() {
+ return name;
+ }
+ public HivePrincipalType getType() {
+ return type;
+ }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
new file mode 100644
index 0000000..4b9d133
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.util.List;
+
+/**
+ * Represents the hive privilege being granted/revoked
+ */
+public class HivePrivilege {
+ private final String name;
+ private final List<String> columns;
+
+ public HivePrivilege(String name, List<String> columns){
+ this.name = name;
+ this.columns = columns;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public List<String> getColumns() {
+ return columns;
+ }
+
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
new file mode 100644
index 0000000..5b101c2
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.Public;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
+
+/**
+ * Represents the object on which privilege is being granted/revoked
+ */
+@Public
+@Unstable
+public class HivePrivilegeObject {
+
+ public enum HivePrivilegeObjectType { DATABASE, TABLE, VIEW, PARTITION, URI }
+ private final HivePrivilegeObjectType type;
+ private final String dbname;
+ private final String tableviewname;
+
+ public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableviewname){
+ this.type = type;
+ this.dbname = dbname;
+ this.tableviewname = tableviewname;
+ }
+
+ public HivePrivilegeObjectType getType() {
+ return type;
+ }
+
+ public String getDbname() {
+ return dbname;
+ }
+
+ public String getTableviewname() {
+ return tableviewname;
+ }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index ef35f1a..aeeff4b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -54,7 +54,11 @@
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
import org.apache.hadoop.hive.ql.util.DosToUnix;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.util.ReflectionUtils;
@@ -118,6 +122,10 @@
private HiveAuthorizationProvider authorizer;
+ private HiveAuthorizer authorizerV2;
+
+ public enum AuthorizationMode{V1, V2};
+
private HiveAuthenticationProvider authenticator;
private CreateTableAutomaticGrant createTableGrants;
@@ -297,15 +305,33 @@ public static SessionState start(SessionState startSs) {
// that would cause ClassNoFoundException otherwise
throw new RuntimeException(e);
}
+ setupAuth(startSs);
+ return startSs;
+ }
+ /**
+ * Setup authentication and authorization plugins for this session.
+ * @param startSs
+ */
+ private static void setupAuth(SessionState startSs) {
try {
startSs.authenticator = HiveUtils.getAuthenticator(
startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
startSs.authorizer = HiveUtils.getAuthorizeProviderManager(
startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
- startSs.authenticator);
- startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
- .getConf());
+ startSs.authenticator, true);
+
+ if(startSs.authorizer == null){
+ //if it was null, the new authorization plugin must be specified in config
+ HiveAuthorizerFactory authorizerFactory =
+ HiveUtils.getAuthorizerFactory(startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
+ startSs.authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(),
+ startSs.getConf(), startSs.authenticator.getUserName());
+ }
+ else{
+ startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
+ .getConf());
+ }
} catch (HiveException e) {
throw new RuntimeException(e);
}
@@ -323,8 +349,7 @@ public static SessionState start(SessionState startSs) {
} else {
LOG.info("No Tez session required at this point. hive.execution.engine=mr.");
}
-
- return startSs;
+ return;
}
/**
@@ -749,6 +774,10 @@ public void setAuthorizer(HiveAuthorizationProvider authorizer) {
this.authorizer = authorizer;
}
+ public HiveAuthorizer getAuthorizerV2() {
+ return authorizerV2;
+ }
+
public HiveAuthenticationProvider getAuthenticator() {
return authenticator;
}
@@ -842,6 +871,22 @@ public void close() throws IOException {
}
}
+ public AuthorizationMode getAuthorizationMode(){
+ if(authorizer != null){
+ return AuthorizationMode.V1;
+ }else if(authorizerV2 != null){
+ return AuthorizationMode.V2;
+ }
+ else {
+ //should not happen - this should not get called before this.start() is called
+ throw new RuntimeException("Authorization plugins not initialized!");
+ }
+ }
+
+ public boolean isAuthorizationModeV2(){
+ return getAuthorizationMode() == AuthorizationMode.V2;
+ }
+
/**
* @param resetPerfLogger
* @return Tries to return an instance of the class whose name is configured in
diff --git ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java
new file mode 100644
index 0000000..d5296be
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.junit.Test;
+
+/**
+ * Test HiveOperationType
+ */
+public class TestHiveOperationType {
+
+ /**
+ * test that all enums in {@link HiveOperation} match one in @{link HiveOperationType}
+ */
+ @Test
+ public void checkHiveOperationTypeMatch(){
+ for(HiveOperation op : HiveOperation.values()){
+ try {
+ HiveOperationType.valueOf(op.name());
+ } catch (IllegalArgumentException ex) {
+ //valueOf throws IllegalArgumentException only when no matching constant exists
+ fail("Unable to find corresponding type in HiveOperationType for " + op);
+ }
+ }
+ assertEquals("Check if HiveOperation, HiveOperationType have same number of instances",
+ HiveOperation.values().length, HiveOperationType.values().length);
+ }
+
+}