diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 9fa9525..9848e0e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -602,10 +602,16 @@ public class HiveConf extends Configuration {
SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook", ""),
HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false),
+ HIVE_METASTORE_AUTHORIZATION_ENABLED("hive.security.metastore.authorization.enabled", false),
HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
"org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider"),
HIVE_AUTHENTICATOR_MANAGER("hive.security.authenticator.manager",
"org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator"),
+ HIVE_METASTORE_AUTHORIZATION_MANAGER("hive.security.metastore.authorization.manager",
+ "org.apache.hadoop.hive.ql.security.authorization."
+ + "DefaultHiveMetastoreAuthorizationProvider"),
+ HIVE_METASTORE_AUTHENTICATOR_MANAGER("hive.security.metastore.authenticator.manager",
+ "org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator"),
HIVE_AUTHORIZATION_TABLE_USER_GRANTS("hive.security.authorization.createtable.user.grants", ""),
HIVE_AUTHORIZATION_TABLE_GROUP_GRANTS("hive.security.authorization.createtable.group.grants",
""),
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index f332f3a..80e5812 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -1250,6 +1250,12 @@
+ <property>
+   <name>hive.security.metastore.authorization.enabled</name>
+   <value>false</value>
+   <description>enable or disable hive metastore-side authorization</description>
+ </property>
+
<property>
  <name>hive.security.authorization.manager</name>
  <value>org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider</value>
  <description>the hive client authorization manager class name.
@@ -1258,6 +1264,14 @@
+ <property>
+   <name>hive.security.metastore.authorization.manager</name>
+   <value>org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider</value>
+   <description>the authorization manager class name to be used in the metastore for authorization.
+   The user-defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
+   </description>
+ </property>
+
<property>
  <name>hive.security.authenticator.manager</name>
  <value>org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator</value>
  <description>hive client authenticator manager class name.
@@ -1265,6 +1279,13 @@
+ <property>
+   <name>hive.security.metastore.authenticator.manager</name>
+   <value>org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator</value>
+   <description>the authenticator manager class name to be used in the metastore for authentication.
+   The user-defined authenticator class should implement interface org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider.
+   </description>
+ </property>
+
<property>
  <name>hive.security.authorization.createtable.user.grants</name>
  <description>the privileges automatically granted to some users whenever a table gets created.
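
These would normally be set in hive-site.xml on the metastore host; a hedged programmatic equivalent, assuming the metastore is then started from this HiveConf:

import org.apache.hadoop.hive.conf.HiveConf;

public class MetastoreAuthSetup {
  public static HiveConf configure() {
    HiveConf conf = new HiveConf();
    // Turn on metastore-side authorization and pin the providers added by this patch.
    conf.setBoolVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED, true);
    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER,
        "org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider");
    conf.setVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator");
    return conf;
  }
}
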
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index 051da23..eb26e7f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -321,10 +321,10 @@ public final class HiveUtils {
@SuppressWarnings("unchecked")
public static HiveAuthorizationProvider getAuthorizeProviderManager(
- Configuration conf, HiveAuthenticationProvider authenticator) throws HiveException {
+ Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
+ HiveAuthenticationProvider authenticator) throws HiveException {
- String clsStr = HiveConf.getVar(conf,
- HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
+ String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey);
HiveAuthorizationProvider ret = null;
try {
@@ -346,11 +346,11 @@ public final class HiveUtils {
}
@SuppressWarnings("unchecked")
- public static HiveAuthenticationProvider getAuthenticator(Configuration conf)
- throws HiveException {
+ public static HiveAuthenticationProvider getAuthenticator(
+ Configuration conf, HiveConf.ConfVars authenticatorConfKey
+ ) throws HiveException {
- String clsStr = HiveConf.getVar(conf,
- HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
+ String clsStr = HiveConf.getVar(conf, authenticatorConfKey);
HiveAuthenticationProvider ret = null;
try {
@@ -370,6 +370,7 @@ public final class HiveUtils {
return ret;
}
+
/**
* Convert FieldSchemas to columnNames with backticks around them.
*/
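
The refactoring above makes the conf key an explicit argument, so the same two helpers can build either the client-side or the metastore-side stack. A sketch of the metastore-side lookup (class and method names here are illustrative):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

public class ProviderLookupSketch {
  public static HiveAuthorizationProvider metastoreSide(HiveConf conf)
      throws HiveException {
    // Same helpers as before, now parameterized by the conf key to read.
    HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(
        conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
    return HiveUtils.getAuthorizeProviderManager(
        conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);
  }
}
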
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java
new file mode 100644
index 0000000..91913d4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+public class HadoopDefaultMetastoreAuthenticator extends HadoopDefaultAuthenticator
+ implements HiveMetastoreAuthenticationProvider {
+
+ @Override
+ public void setMetaStoreHandler(HMSHandler handler) {
+ setConf(handler.getHiveConf());
+ }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
new file mode 100644
index 0000000..a4607dd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+
+/**
+ * HiveMetastoreAuthenticationProvider is an extension of
+ * HiveAuthenticationProvider for authentication from the metastore side.
+ * The implementation should return userNames and groupNames.
+ */
+public interface HiveMetastoreAuthenticationProvider extends HiveAuthenticationProvider {
+
+ /**
+ * Allows the invoker of HiveMetastoreAuthenticationProvider to supply a
+ * hive metastore handler that can be used to provide data for any
+ * authentication that needs to be done.
+ * @param handler
+ */
+ void setMetaStoreHandler(HMSHandler handler);
+
+}
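
A hedged sketch of a custom implementation; the inherited method set (getUserName, getGroupNames, destroy, plus Configurable's setConf/getConf) is assumed from HiveAuthenticationProvider as used by HadoopDefaultAuthenticator, and the fixed principal is illustrative only:

package org.apache.hadoop.hive.ql.security;

import java.util.Collections;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class FixedUserMetastoreAuthenticator
    implements HiveMetastoreAuthenticationProvider {

  private Configuration conf;

  @Override
  public void setMetaStoreHandler(HMSHandler handler) {
    // Same convention as HadoopDefaultMetastoreAuthenticator: configuration
    // comes from the metastore handler rather than a client session.
    setConf(handler.getHiveConf());
  }

  @Override
  public String getUserName() {
    return "hypothetical-user"; // a real implementation would consult UGI or the handler
  }

  @Override
  public List<String> getGroupNames() {
    return Collections.singletonList("hypothetical-group");
  }

  @Override
  public void destroy() throws HiveException {
  }

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }
}
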
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
new file mode 100644
index 0000000..a2f58f9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;
+
+public class AuthorizationPreEventListener extends MetaStorePreEventListener {
+
+ public static final Log LOG = LogFactory.getLog(
+ AuthorizationPreEventListener.class);
+
+ private HiveConf conf;
+ private boolean doAuth;
+ private HiveMetastoreAuthorizationProvider authorizer;
+ private HiveAuthenticationProvider authenticator;
+
+ public AuthorizationPreEventListener(Configuration config) throws HiveException {
+ super(config);
+
+ conf = new HiveConf(config, AuthorizationPreEventListener.class);
+
+ doAuth = conf.getBoolVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED);
+
+ if (doAuth) {
+ authenticator = (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(
+ conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
+ authorizer = (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
+ conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);
+ }
+ }
+
+ @Override
+ public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectException,
+ InvalidOperationException {
+ if (!doAuth) {
+ return; // Metastore-side auth was not turned on, we simply return.
+ }
+
+ authorizer.setMetaStoreHandler(context.getHandler());
+
+ switch (context.getEventType()) {
+ case CREATE_TABLE:
+ authorizeCreateTable((PreCreateTableEvent)context);
+ break;
+ case DROP_TABLE:
+ authorizeDropTable((PreDropTableEvent)context);
+ break;
+ case ALTER_TABLE:
+ authorizeAlterTable((PreAlterTableEvent)context);
+ break;
+ case ADD_PARTITION:
+ authorizeAddPartition((PreAddPartitionEvent)context);
+ break;
+ case DROP_PARTITION:
+ authorizeDropPartition((PreDropPartitionEvent)context);
+ break;
+ case ALTER_PARTITION:
+ authorizeAlterPartition((PreAlterPartitionEvent)context);
+ break;
+ case CREATE_DATABASE:
+ authorizeCreateDatabase((PreCreateDatabaseEvent)context);
+ break;
+ case DROP_DATABASE:
+ authorizeDropDatabase((PreDropDatabaseEvent)context);
+ break;
+ case LOAD_PARTITION_DONE:
+ // noop for now
+ break;
+ default:
+ break;
+ }
+
+ }
+
+ private void authorizeCreateDatabase(PreCreateDatabaseEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ authorizer.authorize(new Database(context.getDatabase()),
+ HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
+ HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeDropDatabase(PreDropDatabaseEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ authorizer.authorize(new Database(context.getDatabase()),
+ HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
+ HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeCreateTable(PreCreateTableEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ authorizer.authorize(getTableFromApiTable(context.getTable()),
+ HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
+ HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeDropTable(PreDropTableEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ authorizer.authorize(getTableFromApiTable(context.getTable()),
+ HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
+ HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeAlterTable(PreAlterTableEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ authorizer.authorize(getTableFromApiTable(context.getOldTable()),
+ null,
+ new Privilege[]{Privilege.ALTER_METADATA});
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeAddPartition(PreAddPartitionEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition();
+ authorizer.authorize(getPartitionFromApiPartition(mapiPart, context),
+ HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
+ HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (NoSuchObjectException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeDropPartition(PreDropPartitionEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition();
+ authorizer.authorize(getPartitionFromApiPartition(mapiPart, context),
+ HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
+ HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (NoSuchObjectException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+ private void authorizeAlterPartition(PreAlterPartitionEvent context)
+ throws InvalidOperationException, MetaException {
+ try {
+ org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getNewPartition();
+ authorizer.authorize(getPartitionFromApiPartition(mapiPart, context),
+ null,
+ new Privilege[]{Privilege.ALTER_METADATA});
+ } catch (AuthorizationException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (NoSuchObjectException e) {
+ throw new InvalidOperationException(e.getMessage());
+ } catch (HiveException e) {
+ throw new MetaException(e.getMessage());
+ }
+ }
+
+
+ public Table getTableFromApiTable(org.apache.hadoop.hive.metastore.api.Table apiTable) {
+ org.apache.hadoop.hive.metastore.api.Table tTable = apiTable.deepCopy();
+ if (tTable.getTableType() == null){
+ // TableType specified was null, we need to figure out what type it was.
+ if (MetaStoreUtils.isExternalTable(tTable)){
+ tTable.setTableType(TableType.EXTERNAL_TABLE.toString());
+ } else if (MetaStoreUtils.isIndexTable(tTable)) {
+ tTable.setTableType(TableType.INDEX_TABLE.toString());
+ } else if ((tTable.getSd() == null) || (tTable.getSd().getLocation() == null)) {
+ tTable.setTableType(TableType.VIRTUAL_VIEW.toString());
+ } else {
+ tTable.setTableType(TableType.MANAGED_TABLE.toString());
+ }
+ }
+ Table tbl = new Table(tTable);
+ return tbl;
+ }
+
+ public Partition getPartitionFromApiPartition(
+ org.apache.hadoop.hive.metastore.api.Partition mapiPart,
+ PreEventContext context) throws HiveException, NoSuchObjectException, MetaException {
+ org.apache.hadoop.hive.metastore.api.Partition tPart = mapiPart.deepCopy();
+ org.apache.hadoop.hive.metastore.api.Table t = context.getHandler().get_table(
+ mapiPart.getDbName(), mapiPart.getTableName());
+ if (tPart.getSd() == null){
+ tPart.setSd(t.getSd());
+ }
+ return new Partition(getTableFromApiTable(t), tPart);
+ }
+
+
+}
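
For the listener to run at all, it must also be registered as a metastore pre-event listener; the hive.metastore.pre.event.listeners key below is assumed from the MetaStorePreEventListener loading convention and is not part of this patch:

import org.apache.hadoop.hive.conf.HiveConf;

public class ListenerWiringSketch {
  public static HiveConf configure() {
    HiveConf conf = new HiveConf();
    // Assumed key: the metastore instantiates pre-event listeners from this list.
    conf.set("hive.metastore.pre.event.listeners",
        "org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener");
    conf.setBoolVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED, true);
    // Every CREATE/DROP/ALTER pre-event is then authorized before the
    // metastore mutates anything; failures surface to clients as
    // InvalidOperationException or MetaException, per the translation above.
    return conf;
  }
}
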
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
new file mode 100644
index 0000000..2f2ceeb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
@@ -0,0 +1,502 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public abstract class BitSetCheckedAuthorizationProvider extends
+ HiveAuthorizationProviderBase {
+
+ static class BitSetChecker {
+
+ boolean[] inputCheck = null;
+ boolean[] outputCheck = null;
+
+ public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
+ Privilege[] outputRequiredPriv) {
+ BitSetChecker checker = new BitSetChecker();
+ if (inputRequiredPriv != null) {
+ checker.inputCheck = new boolean[inputRequiredPriv.length];
+ for (int i = 0; i < checker.inputCheck.length; i++) {
+ checker.inputCheck[i] = false;
+ }
+ }
+ if (outputRequiredPriv != null) {
+ checker.outputCheck = new boolean[outputRequiredPriv.length];
+ for (int i = 0; i < checker.outputCheck.length; i++) {
+ checker.outputCheck[i] = false;
+ }
+ }
+
+ return checker;
+ }
+
+ }
+
+ @Override
+ public void authorize(Privilege[] inputRequiredPriv,
+ Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+
+ BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+ outputRequiredPriv);
+ boolean[] inputCheck = checker.inputCheck;
+ boolean[] outputCheck = checker.outputCheck;
+
+ authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
+ outputCheck);
+ checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck, null, null, null, null);
+ }
+
+ @Override
+ public void authorize(Database db, Privilege[] inputRequiredPriv,
+ Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+
+ BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+ outputRequiredPriv);
+ boolean[] inputCheck = checker.inputCheck;
+ boolean[] outputCheck = checker.outputCheck;
+
+ authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck);
+
+ checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck, db.getName(), null, null, null);
+ }
+
+ @Override
+ public void authorize(Table table, Privilege[] inputRequiredPriv,
+ Privilege[] outputRequiredPriv) throws HiveException {
+
+ BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+ outputRequiredPriv);
+ boolean[] inputCheck = checker.inputCheck;
+ boolean[] outputCheck = checker.outputCheck;
+
+ authorizeUserDBAndTable(table, inputRequiredPriv,
+ outputRequiredPriv, inputCheck, outputCheck);
+ checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck, table.getDbName(), table.getTableName(),
+ null, null);
+ }
+
+ @Override
+ public void authorize(Partition part, Privilege[] inputRequiredPriv,
+ Privilege[] outputRequiredPriv) throws HiveException {
+
+ //if the partition does not have partition level privilege, go to table level.
+ Table table = part.getTable();
+ if (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") == null || ("FALSE"
+ .equalsIgnoreCase(table.getParameters().get(
+ "PARTITION_LEVEL_PRIVILEGE")))) {
+ this.authorize(part.getTable(), inputRequiredPriv, outputRequiredPriv);
+ return;
+ }
+
+ BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+ outputRequiredPriv);
+ boolean[] inputCheck = checker.inputCheck;
+ boolean[] outputCheck = checker.outputCheck;
+
+ if (authorizeUserDbAndPartition(part, inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck)){
+ return;
+ }
+
+ checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck, part.getTable().getDbName(), part
+ .getTable().getTableName(), part.getName(), null);
+ }
+
+ @Override
+ public void authorize(Table table, Partition part, List<String> columns,
+ Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
+ throws HiveException {
+
+ BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+ outputRequiredPriv);
+ boolean[] inputCheck = checker.inputCheck;
+ boolean[] outputCheck = checker.outputCheck;
+
+ String partName = null;
+ List<String> partValues = null;
+ if (part != null
+ && (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
+ .equalsIgnoreCase(table.getParameters().get(
+ "PARTITION_LEVEL_PRIVILEGE"))))) {
+ partName = part.getName();
+ partValues = part.getValues();
+ }
+
+ if (partValues == null) {
+ if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv,
+ inputCheck, outputCheck)) {
+ return;
+ }
+ } else {
+ if (authorizeUserDbAndPartition(part, inputRequiredPriv,
+ outputRequiredPriv, inputCheck, outputCheck)) {
+ return;
+ }
+ }
+
+ for (String col : columns) {
+
+ BitSetChecker checker2 = BitSetChecker.getBitSetChecker(
+ inputRequiredPriv, outputRequiredPriv);
+ boolean[] inputCheck2 = checker2.inputCheck;
+ boolean[] outputCheck2 = checker2.outputCheck;
+
+ PrincipalPrivilegeSet partColumnPrivileges = hive_db
+ .get_privilege_set(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
+ partValues, col, this.getAuthenticator().getUserName(), this
+ .getAuthenticator().getGroupNames());
+
+ authorizePrivileges(partColumnPrivileges, inputRequiredPriv, inputCheck2,
+ outputRequiredPriv, outputCheck2);
+
+ if (inputCheck2 != null) {
+ booleanArrayOr(inputCheck2, inputCheck);
+ }
+ if (outputCheck2 != null) {
+ booleanArrayOr(outputCheck2, outputCheck); // fold table/partition-level results into the per-column output check
+ }
+
+ checkAndThrowAuthorizationException(inputRequiredPriv,
+ outputRequiredPriv, inputCheck2, outputCheck2, table.getDbName(),
+ table.getTableName(), partName, col);
+ }
+ }
+
+ protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv,
+ boolean[] inputCheck, Privilege[] outputRequiredPriv,
+ boolean[] outputCheck) throws HiveException {
+ PrincipalPrivilegeSet privileges = hive_db.get_privilege_set(
+ HiveObjectType.GLOBAL, null, null, null, null, this.getAuthenticator()
+ .getUserName(), this.getAuthenticator().getGroupNames());
+ return authorizePrivileges(privileges, inputRequiredPriv, inputCheck,
+ outputRequiredPriv, outputCheck);
+ }
+
+ /**
+ * Check privileges on User and DB. This is used before doing a check on
+ * table/partition objects, first check the user and DB privileges. If it
+ * passed on this check, no need to check against the table/partition hive
+ * object.
+ *
+ * @param db
+ * @param inputRequiredPriv
+ * @param outputRequiredPriv
+ * @param inputCheck
+ * @param outputCheck
+ * @return true if the check on user and DB privilege passed, which means no
+ * need for privilege check on concrete hive objects.
+ * @throws HiveException
+ */
+ private boolean authorizeUserAndDBPriv(Database db,
+ Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+ boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+ if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
+ outputCheck)) {
+ return true;
+ }
+
+ PrincipalPrivilegeSet dbPrivileges = hive_db.get_privilege_set(
+ HiveObjectType.DATABASE, db.getName(), null, null, null, this
+ .getAuthenticator().getUserName(), this.getAuthenticator()
+ .getGroupNames());
+
+ if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck,
+ outputRequiredPriv, outputCheck)) {
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Check privileges on User, DB and table objects.
+ *
+ * @param table
+ * @param inputRequiredPriv
+ * @param outputRequiredPriv
+ * @param inputCheck
+ * @param outputCheck
+ * @return true if the check passed
+ * @throws HiveException
+ */
+ private boolean authorizeUserDBAndTable(Table table,
+ Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+ boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+
+ if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()),
+ inputRequiredPriv, outputRequiredPriv, inputCheck, outputCheck)) {
+ return true;
+ }
+
+ PrincipalPrivilegeSet tablePrivileges = hive_db.get_privilege_set(
+ HiveObjectType.TABLE, table.getDbName(), table.getTableName(), null,
+ null, this.getAuthenticator().getUserName(), this.getAuthenticator()
+ .getGroupNames());
+
+ if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck,
+ outputRequiredPriv, outputCheck)) {
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Check privileges on User, DB and table/Partition objects.
+ *
+ * @param part
+ * @param inputRequiredPriv
+ * @param outputRequiredPriv
+ * @param inputCheck
+ * @param outputCheck
+ * @return true if the check passed
+ * @throws HiveException
+ */
+ private boolean authorizeUserDbAndPartition(Partition part,
+ Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+ boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+
+ if (authorizeUserAndDBPriv(
+ hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv,
+ outputRequiredPriv, inputCheck, outputCheck)) {
+ return true;
+ }
+
+ PrincipalPrivilegeSet partPrivileges = part.getTPartition().getPrivileges();
+ if (partPrivileges == null) {
+ partPrivileges = hive_db.get_privilege_set(HiveObjectType.PARTITION, part
+ .getTable().getDbName(), part.getTable().getTableName(), part
+ .getValues(), null, this.getAuthenticator().getUserName(), this
+ .getAuthenticator().getGroupNames());
+ }
+
+ if (authorizePrivileges(partPrivileges, inputRequiredPriv, inputCheck,
+ outputRequiredPriv, outputCheck)) {
+ return true;
+ }
+
+ return false;
+ }
+
+ protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges,
+ Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv,
+ boolean[] outputCheck) throws HiveException {
+
+ boolean pass = true;
+ if (inputPriv != null) {
+ pass = pass && matchPrivs(inputPriv, privileges, inputCheck);
+ }
+ if (outputPriv != null) {
+ pass = pass && matchPrivs(outputPriv, privileges, outputCheck);
+ }
+ return pass;
+ }
+
+ /**
+ * Try to match an array of required privileges against the user/group/role
+ * grants in the given privilege set.
+ *
+ * @param inputPriv the privileges required
+ * @param privileges the granted privilege set to match against
+ * @param check marks check[i] true once inputPriv[i] has been matched
+ */
+ private boolean matchPrivs(Privilege[] inputPriv,
+ PrincipalPrivilegeSet privileges, boolean[] check) {
+
+ if (inputPriv == null) {
+ return true;
+ }
+
+ if (privileges == null) {
+ return false;
+ }
+
+ /*
+ * user grants
+ */
+ Set<String> privSet = new HashSet<String>();
+ if (privileges.getUserPrivileges() != null
+ && privileges.getUserPrivileges().size() > 0) {
+ Collection<List<PrivilegeGrantInfo>> privCollection = privileges.getUserPrivileges().values();
+
+ List<String> userPrivs = getPrivilegeStringList(privCollection);
+ if (userPrivs != null && userPrivs.size() > 0) {
+ for (String priv : userPrivs) {
+ if (priv == null || priv.trim().equals("")) {
+ continue;
+ }
+ if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+ setBooleanArray(check, true);
+ return true;
+ }
+ privSet.add(priv.toLowerCase());
+ }
+ }
+ }
+
+ /*
+ * group grants
+ */
+ if (privileges.getGroupPrivileges() != null
+ && privileges.getGroupPrivileges().size() > 0) {
+ Collection<List<PrivilegeGrantInfo>> groupPrivCollection = privileges
+ .getGroupPrivileges().values();
+ List<String> groupPrivs = getPrivilegeStringList(groupPrivCollection);
+ if (groupPrivs != null && groupPrivs.size() > 0) {
+ for (String priv : groupPrivs) {
+ if (priv == null || priv.trim().equals("")) {
+ continue;
+ }
+ if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+ setBooleanArray(check, true);
+ return true;
+ }
+ privSet.add(priv.toLowerCase());
+ }
+ }
+ }
+
+ /*
+ * roles grants
+ */
+ if (privileges.getRolePrivileges() != null
+ && privileges.getRolePrivileges().size() > 0) {
+ Collection<List<PrivilegeGrantInfo>> rolePrivsCollection = privileges
+ .getRolePrivileges().values();
+ List<String> rolePrivs = getPrivilegeStringList(rolePrivsCollection);
+ if (rolePrivs != null && rolePrivs.size() > 0) {
+ for (String priv : rolePrivs) {
+ if (priv == null || priv.trim().equals("")) {
+ continue;
+ }
+ if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+ setBooleanArray(check, true);
+ return true;
+ }
+ privSet.add(priv.toLowerCase());
+ }
+ }
+ }
+
+ for (int i = 0; i < inputPriv.length; i++) {
+ String toMatch = inputPriv[i].toString();
+ if (!check[i]) {
+ check[i] = privSet.contains(toMatch.toLowerCase());
+ }
+ }
+
+ return firstFalseIndex(check) < 0;
+ }
+
+ private List<String> getPrivilegeStringList(
+ Collection<List<PrivilegeGrantInfo>> privCollection) {
+ List<String> userPrivs = new ArrayList<String>();
+ if (privCollection != null && privCollection.size() > 0) {
+ for (List<PrivilegeGrantInfo> grantList : privCollection) {
+ if (grantList == null){
+ continue;
+ }
+ for (int i = 0; i < grantList.size(); i++) {
+ PrivilegeGrantInfo grant = grantList.get(i);
+ userPrivs.add(grant.getPrivilege());
+ }
+ }
+ }
+ return userPrivs;
+ }
+
+ private static void setBooleanArray(boolean[] check, boolean b) {
+ for (int i = 0; i < check.length; i++) {
+ check[i] = b;
+ }
+ }
+
+ private static void booleanArrayOr(boolean[] output, boolean[] input) {
+ for (int i = 0; i < output.length && i < input.length; i++) {
+ output[i] = output[i] || input[i];
+ }
+ }
+
+ private void checkAndThrowAuthorizationException(
+ Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+ boolean[] inputCheck, boolean[] outputCheck,String dbName,
+ String tableName, String partitionName, String columnName) {
+
+ String hiveObject = "{ ";
+ if (dbName != null) {
+ hiveObject = hiveObject + "database:" + dbName;
+ }
+ if (tableName != null) {
+ hiveObject = hiveObject + ", table:" + tableName;
+ }
+ if (partitionName != null) {
+ hiveObject = hiveObject + ", partitionName:" + partitionName;
+ }
+ if (columnName != null) {
+ hiveObject = hiveObject + ", columnName:" + columnName;
+ }
+ hiveObject = hiveObject + "}";
+
+ if (inputCheck != null) {
+ int input = this.firstFalseIndex(inputCheck);
+ if (input >= 0) {
+ throw new AuthorizationException("No privilege '"
+ + inputRequiredPriv[input].toString() + "' found for inputs "
+ + hiveObject);
+ }
+ }
+
+ if (outputCheck != null) {
+ int output = this.firstFalseIndex(outputCheck);
+ if (output >= 0) {
+ throw new AuthorizationException("No privilege '"
+ + outputRequiredPriv[output].toString() + "' found for outputs "
+ + hiveObject);
+ }
+ }
+ }
+
+ private int firstFalseIndex(boolean[] inputCheck) {
+ if (inputCheck != null) {
+ for (int i = 0; i < inputCheck.length; i++) {
+ if (!inputCheck[i]) {
+ return i;
+ }
+ }
+ }
+ return -1;
+ }
+}
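
The checking scheme is easy to lose in the class's size, so here is a self-contained sketch of the core idea: one boolean slot per required privilege, grants at each scope OR-ed into the same array, and failure reported at the first slot still false:

public class BitSetCheckDemo {

  // Same semantics as booleanArrayOr above: fold one scope's results into the total.
  static void booleanArrayOr(boolean[] output, boolean[] input) {
    for (int i = 0; i < output.length && i < input.length; i++) {
      output[i] = output[i] || input[i];
    }
  }

  // Same semantics as firstFalseIndex above: -1 means every slot is satisfied.
  static int firstFalseIndex(boolean[] check) {
    for (int i = 0; i < check.length; i++) {
      if (!check[i]) {
        return i;
      }
    }
    return -1;
  }

  public static void main(String[] args) {
    boolean[] check = new boolean[2];                   // required: e.g. {SELECT, DROP}
    booleanArrayOr(check, new boolean[] {true, false}); // user-level grant covers slot 0
    booleanArrayOr(check, new boolean[] {false, true}); // table-level grant covers slot 1
    System.out.println(firstFalseIndex(check));         // -1: authorization would pass
  }
}
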
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
index 66c8cd7..2fa512c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
@@ -18,478 +18,16 @@
package org.apache.hadoop.hive.ql.security.authorization;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.HiveObjectType;
-import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
public class DefaultHiveAuthorizationProvider extends
- HiveAuthorizationProviderBase {
-
- static class BitSetChecker {
-
- boolean[] inputCheck = null;
- boolean[] outputCheck = null;
-
- public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
- Privilege[] outputRequiredPriv) {
- BitSetChecker checker = new BitSetChecker();
- if (inputRequiredPriv != null) {
- checker.inputCheck = new boolean[inputRequiredPriv.length];
- for (int i = 0; i < checker.inputCheck.length; i++) {
- checker.inputCheck[i] = false;
- }
- }
- if (outputRequiredPriv != null) {
- checker.outputCheck = new boolean[outputRequiredPriv.length];
- for (int i = 0; i < checker.outputCheck.length; i++) {
- checker.outputCheck[i] = false;
- }
- }
-
- return checker;
- }
-
- }
-
- @Override
- public void authorize(Privilege[] inputRequiredPriv,
- Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
-
- BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
- outputRequiredPriv);
- boolean[] inputCheck = checker.inputCheck;
- boolean[] outputCheck = checker.outputCheck;
-
- authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
- outputCheck);
- checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck, null, null, null, null);
- }
-
- @Override
- public void authorize(Database db, Privilege[] inputRequiredPriv,
- Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
-
- BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
- outputRequiredPriv);
- boolean[] inputCheck = checker.inputCheck;
- boolean[] outputCheck = checker.outputCheck;
-
- authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck);
-
- checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck, db.getName(), null, null, null);
- }
-
- @Override
- public void authorize(Table table, Privilege[] inputRequiredPriv,
- Privilege[] outputRequiredPriv) throws HiveException {
- BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
- outputRequiredPriv);
- boolean[] inputCheck = checker.inputCheck;
- boolean[] outputCheck = checker.outputCheck;
-
- authorizeUserDBAndTable(table, inputRequiredPriv,
- outputRequiredPriv, inputCheck, outputCheck);
- checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck, table.getDbName(), table.getTableName(),
- null, null);
- }
-
- @Override
- public void authorize(Partition part, Privilege[] inputRequiredPriv,
- Privilege[] outputRequiredPriv) throws HiveException {
-
- //if the partition does not have partition level privilege, go to table level.
- Table table = part.getTable();
- if (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") == null || ("FALSE"
- .equalsIgnoreCase(table.getParameters().get(
- "PARTITION_LEVEL_PRIVILEGE")))) {
- this.authorize(part.getTable(), inputRequiredPriv, outputRequiredPriv);
- return;
- }
-
- BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
- outputRequiredPriv);
- boolean[] inputCheck = checker.inputCheck;
- boolean[] outputCheck = checker.outputCheck;
-
- if (authorizeUserDbAndPartition(part, inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck)){
- return;
- }
-
- checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck, part.getTable().getDbName(), part
- .getTable().getTableName(), part.getName(), null);
- }
-
- @Override
- public void authorize(Table table, Partition part, List<String> columns,
- Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
- throws HiveException {
- BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
- outputRequiredPriv);
- boolean[] inputCheck = checker.inputCheck;
- boolean[] outputCheck = checker.outputCheck;
-
- String partName = null;
- List<String> partValues = null;
- if (part != null
- && (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
- .equalsIgnoreCase(table.getParameters().get(
- "PARTITION_LEVEL_PRIVILEGE"))))) {
- partName = part.getName();
- partValues = part.getValues();
- }
-
- if (partValues == null) {
- if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv,
- inputCheck, outputCheck)) {
- return;
- }
- } else {
- if (authorizeUserDbAndPartition(part, inputRequiredPriv,
- outputRequiredPriv, inputCheck, outputCheck)) {
- return;
- }
- }
-
- for (String col : columns) {
-
- BitSetChecker checker2 = BitSetChecker.getBitSetChecker(
- inputRequiredPriv, outputRequiredPriv);
- boolean[] inputCheck2 = checker2.inputCheck;
- boolean[] outputCheck2 = checker2.outputCheck;
-
- PrincipalPrivilegeSet partColumnPrivileges = hive_db
- .get_privilege_set(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
- partValues, col, this.getAuthenticator().getUserName(), this
- .getAuthenticator().getGroupNames());
-
- authorizePrivileges(partColumnPrivileges, inputRequiredPriv, inputCheck2,
- outputRequiredPriv, outputCheck2);
-
- if (inputCheck2 != null) {
- booleanArrayOr(inputCheck2, inputCheck);
- }
- if (outputCheck2 != null) {
- booleanArrayOr(inputCheck2, inputCheck);
- }
-
- checkAndThrowAuthorizationException(inputRequiredPriv,
- outputRequiredPriv, inputCheck2, outputCheck2, table.getDbName(),
- table.getTableName(), partName, col);
- }
- }
-
- protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv,
- boolean[] inputCheck, Privilege[] outputRequiredPriv,
- boolean[] outputCheck) throws HiveException {
- PrincipalPrivilegeSet privileges = hive_db.get_privilege_set(
- HiveObjectType.GLOBAL, null, null, null, null, this.getAuthenticator()
- .getUserName(), this.getAuthenticator().getGroupNames());
- return authorizePrivileges(privileges, inputRequiredPriv, inputCheck,
- outputRequiredPriv, outputCheck);
- }
-
- /**
- * Check privileges on User and DB. This is used before doing a check on
- * table/partition objects, first check the user and DB privileges. If it
- * passed on this check, no need to check against the table/partition hive
- * object.
- *
- * @param db
- * @param inputRequiredPriv
- * @param outputRequiredPriv
- * @param inputCheck
- * @param outputCheck
- * @return true if the check on user and DB privilege passed, which means no
- * need for privilege check on concrete hive objects.
- * @throws HiveException
- */
- private boolean authorizeUserAndDBPriv(Database db,
- Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
- boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
- if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
- outputCheck)) {
- return true;
- }
-
- PrincipalPrivilegeSet dbPrivileges = hive_db.get_privilege_set(
- HiveObjectType.DATABASE, db.getName(), null, null, null, this
- .getAuthenticator().getUserName(), this.getAuthenticator()
- .getGroupNames());
-
- if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck,
- outputRequiredPriv, outputCheck)) {
- return true;
- }
-
- return false;
- }
-
- /**
- * Check privileges on User, DB and table objects.
- *
- * @param table
- * @param inputRequiredPriv
- * @param outputRequiredPriv
- * @param inputCheck
- * @param outputCheck
- * @return true if the check passed
- * @throws HiveException
- */
- private boolean authorizeUserDBAndTable(Table table,
- Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
- boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-
- if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()),
- inputRequiredPriv, outputRequiredPriv, inputCheck, outputCheck)) {
- return true;
- }
-
- PrincipalPrivilegeSet tablePrivileges = hive_db.get_privilege_set(
- HiveObjectType.TABLE, table.getDbName(), table.getTableName(), null,
- null, this.getAuthenticator().getUserName(), this.getAuthenticator()
- .getGroupNames());
-
- if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck,
- outputRequiredPriv, outputCheck)) {
- return true;
- }
-
- return false;
- }
-
- /**
- * Check privileges on User, DB and table/Partition objects.
- *
- * @param part
- * @param inputRequiredPriv
- * @param outputRequiredPriv
- * @param inputCheck
- * @param outputCheck
- * @return true if the check passed
- * @throws HiveException
- */
- private boolean authorizeUserDbAndPartition(Partition part,
- Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
- boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-
- if (authorizeUserAndDBPriv(
- hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv,
- outputRequiredPriv, inputCheck, outputCheck)) {
- return true;
- }
-
- PrincipalPrivilegeSet partPrivileges = part.getTPartition().getPrivileges();
- if (partPrivileges == null) {
- partPrivileges = hive_db.get_privilege_set(HiveObjectType.PARTITION, part
- .getTable().getDbName(), part.getTable().getTableName(), part
- .getValues(), null, this.getAuthenticator().getUserName(), this
- .getAuthenticator().getGroupNames());
- }
+ BitSetCheckedAuthorizationProvider {
- if (authorizePrivileges(partPrivileges, inputRequiredPriv, inputCheck,
- outputRequiredPriv, outputCheck)) {
- return true;
- }
-
- return false;
- }
-
- protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges,
- Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv,
- boolean[] outputCheck) throws HiveException {
-
- boolean pass = true;
- if (inputPriv != null) {
- pass = pass && matchPrivs(inputPriv, privileges, inputCheck);
- }
- if (outputPriv != null) {
- pass = pass && matchPrivs(outputPriv, privileges, outputCheck);
- }
- return pass;
+ public void init(Configuration conf) throws HiveException {
+ hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class)));
}
- /**
- * try to match an array of privileges from user/groups/roles grants.
- *
- * @param container
- */
- private boolean matchPrivs(Privilege[] inputPriv,
- PrincipalPrivilegeSet privileges, boolean[] check) {
-
- if (inputPriv == null)
- return true;
-
- if (privileges == null)
- return false;
-
- /*
- * user grants
- */
- Set<String> privSet = new HashSet<String>();
- if (privileges.getUserPrivileges() != null
- && privileges.getUserPrivileges().size() > 0) {
- Collection<List<PrivilegeGrantInfo>> privCollection = privileges.getUserPrivileges().values();
-
- List<String> userPrivs = getPrivilegeStringList(privCollection);
- if (userPrivs != null && userPrivs.size() > 0) {
- for (String priv : userPrivs) {
- if (priv == null || priv.trim().equals(""))
- continue;
- if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
- setBooleanArray(check, true);
- return true;
- }
- privSet.add(priv.toLowerCase());
- }
- }
- }
-
- /*
- * group grants
- */
- if (privileges.getGroupPrivileges() != null
- && privileges.getGroupPrivileges().size() > 0) {
- Collection<List<PrivilegeGrantInfo>> groupPrivCollection = privileges
- .getGroupPrivileges().values();
- List<String> groupPrivs = getPrivilegeStringList(groupPrivCollection);
- if (groupPrivs != null && groupPrivs.size() > 0) {
- for (String priv : groupPrivs) {
- if (priv == null || priv.trim().equals(""))
- continue;
- if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
- setBooleanArray(check, true);
- return true;
- }
- privSet.add(priv.toLowerCase());
- }
- }
- }
-
- /*
- * roles grants
- */
- if (privileges.getRolePrivileges() != null
- && privileges.getRolePrivileges().size() > 0) {
- Collection<List<PrivilegeGrantInfo>> rolePrivsCollection = privileges
- .getRolePrivileges().values();
- ;
- List<String> rolePrivs = getPrivilegeStringList(rolePrivsCollection);
- if (rolePrivs != null && rolePrivs.size() > 0) {
- for (String priv : rolePrivs) {
- if (priv == null || priv.trim().equals(""))
- continue;
- if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
- setBooleanArray(check, true);
- return true;
- }
- privSet.add(priv.toLowerCase());
- }
- }
- }
-
- for (int i = 0; i < inputPriv.length; i++) {
- String toMatch = inputPriv[i].toString();
- if (!check[i]) {
- check[i] = privSet.contains(toMatch.toLowerCase());
- }
- }
-
- return firstFalseIndex(check) <0;
- }
-
- private List<String> getPrivilegeStringList(
- Collection<List<PrivilegeGrantInfo>> privCollection) {
- List<String> userPrivs = new ArrayList<String>();
- if (privCollection!= null && privCollection.size()>0) {
- for (List<PrivilegeGrantInfo> grantList : privCollection) {
- if (grantList == null){
- continue;
- }
- for (int i = 0; i < grantList.size(); i++) {
- PrivilegeGrantInfo grant = grantList.get(i);
- userPrivs.add(grant.getPrivilege());
- }
- }
- }
- return userPrivs;
- }
-
- private static void setBooleanArray(boolean[] check, boolean b) {
- for (int i = 0; i < check.length; i++) {
- check[i] = b;
- }
- }
-
- private static void booleanArrayOr(boolean[] output, boolean[] input) {
- for (int i = 0; i < output.length && i < input.length; i++) {
- output[i] = output[i] || input[i];
- }
- }
-
- private void checkAndThrowAuthorizationException(
- Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
- boolean[] inputCheck, boolean[] outputCheck,String dbName,
- String tableName, String partitionName, String columnName) {
-
- String hiveObject = "{ ";
- if (dbName != null) {
- hiveObject = hiveObject + "database:" + dbName;
- }
- if (tableName != null) {
- hiveObject = hiveObject + ", table:" + tableName;
- }
- if (partitionName != null) {
- hiveObject = hiveObject + ", partitionName:" + partitionName;
- }
- if (columnName != null) {
- hiveObject = hiveObject + ", columnName:" + columnName;
- }
- hiveObject = hiveObject + "}";
-
- if (inputCheck != null) {
- int input = this.firstFalseIndex(inputCheck);
- if (input >= 0) {
- throw new AuthorizationException("No privilege '"
- + inputRequiredPriv[input].toString() + "' found for inputs "
- + hiveObject);
- }
- }
-
- if (outputCheck != null) {
- int output = this.firstFalseIndex(outputCheck);
- if (output >= 0) {
- throw new AuthorizationException("No privilege '"
- + outputRequiredPriv[output].toString() + "' found for outputs "
- + hiveObject);
- }
- }
- }
-
- private int firstFalseIndex(boolean[] inputCheck) {
- if (inputCheck != null) {
- for (int i = 0; i < inputCheck.length; i++) {
- if (!inputCheck[i]) {
- return i;
- }
- }
- }
- return -1;
- }
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
new file mode 100644
index 0000000..6685645
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+public class DefaultHiveMetastoreAuthorizationProvider extends BitSetCheckedAuthorizationProvider
+ implements HiveMetastoreAuthorizationProvider {
+
+ @Override
+ public void init(Configuration conf) throws HiveException {
+ hive_db = new HiveProxy();
+ }
+
+ @Override
+ public void setMetaStoreHandler(HMSHandler handler) {
+ hive_db.setHandler(handler);
+ }
+
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
index ddb4231..795064a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
@@ -18,21 +18,90 @@
package org.apache.hadoop.hive.ql.security.authorization;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.thrift.TException;
public abstract class HiveAuthorizationProviderBase implements
HiveAuthorizationProvider {
-
+
+ protected class HiveProxy {
+
+ private final Hive hiveClient;
+ private HMSHandler handler;
+
+ public HiveProxy(Hive hive) {
+ this.hiveClient = hive;
+ this.handler = null;
+ }
+
+ public HiveProxy() {
+ this.hiveClient = null;
+ this.handler = null;
+ }
+
+ public void setHandler(HMSHandler handler){
+ this.handler = handler;
+ }
+
+ public PrincipalPrivilegeSet get_privilege_set(HiveObjectType column, String dbName,
+ String tableName, List<String> partValues, String col, String userName,
+ List<String> groupNames) throws HiveException {
+ if (hiveClient != null) {
+ return hiveClient.get_privilege_set(
+ column, dbName, tableName, partValues, col, userName, groupNames);
+ } else {
+ HiveObjectRef hiveObj = new HiveObjectRef(column, dbName,
+ tableName, partValues, col);
+ try {
+ return handler.get_privilege_set(hiveObj, userName, groupNames);
+ } catch (MetaException e) {
+ throw new HiveException(e);
+ } catch (TException e) {
+ throw new HiveException(e);
+ }
+ }
+ }
+
+ public Database getDatabase(String dbName) throws HiveException {
+ if (hiveClient != null) {
+ return hiveClient.getDatabase(dbName);
+ } else {
+ try {
+ return handler.get_database(dbName);
+ } catch (NoSuchObjectException e) {
+ throw new HiveException(e);
+ } catch (MetaException e) {
+ throw new HiveException(e);
+ }
+ }
+ }
+
+ }
+
+ protected HiveProxy hive_db;
+
protected HiveAuthenticationProvider authenticator;
- protected Hive hive_db;
-
private Configuration conf;
+ public static final Log LOG = LogFactory.getLog(
+ HiveAuthorizationProviderBase.class);
+
+
public void setConf(Configuration conf) {
this.conf = conf;
try {
@@ -42,10 +111,6 @@ public abstract class HiveAuthorizationProviderBase implements
}
}
- public void init(Configuration conf) throws HiveException {
- hive_db = Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class));
- }
-
public Configuration getConf() {
return this.conf;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
new file mode 100644
index 0000000..6a4fffb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+/**
+ * HiveMetastoreAuthorizationProvider: an extension of HiveAuthorizationProvider
+ * that is intended to be invoked from the metastore side. It is called by
+ * AuthorizationPreEventListener.
+ *
+ */
+public interface HiveMetastoreAuthorizationProvider extends HiveAuthorizationProvider {
+
+ /**
+ * Allows the invoker of HiveMetastoreAuthorizationProvider to supply a
+ * hive metastore handler that can be used to make the calls needed to
+ * determine whether an authorization will succeed. Intended to be called
+ * before any of the authorize methods are called.
+ * @param handler
+ */
+ void setMetaStoreHandler(HMSHandler handler);
+
+}
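
A hedged sketch of a custom metastore-side provider built on the classes in this patch: it extends DefaultHiveMetastoreAuthorizationProvider and vetoes table drops; the Privilege.DROP constant is assumed from HiveOperation.DROPTABLE's required privileges:

package org.apache.hadoop.hive.ql.security.authorization;

import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public class NoDropMetastoreAuthorizationProvider
    extends DefaultHiveMetastoreAuthorizationProvider {

  @Override
  public void authorize(Table table, Privilege[] inputRequiredPriv,
      Privilege[] outputRequiredPriv) throws HiveException {
    if (outputRequiredPriv != null) {
      for (Privilege priv : outputRequiredPriv) {
        if (priv == Privilege.DROP) { // static constants, identity compare suffices
          throw new AuthorizationException(
              "table drops are rejected by this sketch provider");
        }
      }
    }
    // Everything else falls through to the default grant-based checks.
    super.authorize(table, inputRequiredPriv, outputRequiredPriv);
  }
}
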
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 9f76dbe..f6727b7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -282,10 +282,11 @@ public class SessionState {
}
try {
- startSs.authenticator = HiveUtils.getAuthenticator(startSs
- .getConf());
- startSs.authorizer = HiveUtils.getAuthorizeProviderManager(startSs
- .getConf(), startSs.authenticator);
+ startSs.authenticator = HiveUtils.getAuthenticator(
+ startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
+ startSs.authorizer = HiveUtils.getAuthorizeProviderManager(
+ startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
+ startSs.authenticator);
startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs
.getConf());
} catch (HiveException e) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
new file mode 100644
index 0000000..195a5a4
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+
+public class DummyHiveMetastoreAuthorizationProvider implements HiveMetastoreAuthorizationProvider {
+
+
+ protected HiveAuthenticationProvider authenticator;
+
+ public enum AuthCallContextType {
+ USER,
+ DB,
+ TABLE,
+ PARTITION,
+ TABLE_AND_PARTITION
+ };
+
+ class AuthCallContext {
+
+ public AuthCallContextType type;
+ public List