diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 818d592..cc529d5 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -602,10 +602,16 @@ public class HiveConf extends Configuration {
     SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook", ""),
 
     HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false),
+    HIVE_METASTORE_AUTHORIZATION_ENABLED("hive.security.metastore.authorization.enabled", false),
     HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
         "org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider"),
     HIVE_AUTHENTICATOR_MANAGER("hive.security.authenticator.manager",
         "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator"),
+    HIVE_METASTORE_AUTHORIZATION_MANAGER("hive.security.metastore.authorization.manager",
+        "org.apache.hadoop.hive.ql.security.authorization." +
+        "DefaultHiveMetastoreAuthorizationProvider"),
+    HIVE_METASTORE_AUTHENTICATOR_MANAGER("hive.security.metastore.authenticator.manager",
+        "org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator"),
     HIVE_AUTHORIZATION_TABLE_USER_GRANTS("hive.security.authorization.createtable.user.grants", ""),
     HIVE_AUTHORIZATION_TABLE_GROUP_GRANTS("hive.security.authorization.createtable.group.grants", ""),
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index 92156ac..e9aa1cb 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -1250,6 +1250,12 @@
 </property>
 
 <property>
+  <name>hive.security.metastore.authorization.enabled</name>
+  <value>false</value>
+  <description>enable or disable hive metastore-side authorization</description>
+</property>
+
+<property>
   <name>hive.security.authorization.manager</name>
   <value>org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider</value>
   <description>the hive client authorization manager class name.
@@ -1258,6 +1264,14 @@
 </property>
 
 <property>
+  <name>hive.security.metastore.authorization.manager</name>
+  <value>org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider</value>
+  <description>the authorization manager class name to be used in the metastore for authorization.
+  The user-defined authorization class should implement interface
+  org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
+  </description>
+</property>
+
+<property>
   <name>hive.security.authenticator.manager</name>
   <value>org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator</value>
   <description>hive client authenticator manager class name.
@@ -1265,6 +1279,13 @@
 </property>
 
 <property>
+  <name>hive.security.metastore.authenticator.manager</name>
+  <value>org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator</value>
+  <description>authenticator manager class name to be used in the metastore for authentication.
+  The user-defined authenticator should implement interface org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider.
+  </description>
+</property>
+
 <property>
   <name>hive.security.authorization.createtable.user.grants</name>
   <value></value>
   <description>the privileges automatically granted to some users whenever a table gets created.
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 66fd0dc..c80aed1 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -365,6 +365,10 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       return conf;
     }
 
+    public Warehouse getWh() {
+      return wh;
+    }
+
     /**
      * Get a cached RawStore.
      *
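Operationally, the three new keys above are set in hive-site.xml on the metastore side. A minimal sketch of turning the feature on with the defaults this patch ships (the hive.metastore.pre.event.listeners key that installs the listener is an assumption here; it is not shown in this excerpt):

    <!-- Sketch: enable metastore-side authorization; listener key name assumed. -->
    <property>
      <name>hive.metastore.pre.event.listeners</name>
      <value>org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener</value>
    </property>
    <property>
      <name>hive.security.metastore.authorization.enabled</name>
      <value>true</value>
    </property>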
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index 051da23..eb26e7f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -321,10 +321,10 @@ public final class HiveUtils {
 
   @SuppressWarnings("unchecked")
   public static HiveAuthorizationProvider getAuthorizeProviderManager(
-      Configuration conf, HiveAuthenticationProvider authenticator) throws HiveException {
+      Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
+      HiveAuthenticationProvider authenticator) throws HiveException {
 
-    String clsStr = HiveConf.getVar(conf,
-        HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
+    String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey);
 
     HiveAuthorizationProvider ret = null;
     try {
@@ -346,11 +346,11 @@ public final class HiveUtils {
   }
 
   @SuppressWarnings("unchecked")
-  public static HiveAuthenticationProvider getAuthenticator(Configuration conf)
-      throws HiveException {
+  public static HiveAuthenticationProvider getAuthenticator(
+      Configuration conf, HiveConf.ConfVars authenticatorConfKey
+      ) throws HiveException {
 
-    String clsStr = HiveConf.getVar(conf,
-        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
+    String clsStr = HiveConf.getVar(conf, authenticatorConfKey);
 
     HiveAuthenticationProvider ret = null;
     try {
@@ -370,6 +370,7 @@ public final class HiveUtils {
     return ret;
   }
 
+
   /**
    * Convert FieldSchemas to columnNames with backticks around them.
    */
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
index cddfadd..c7dad79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultAuthenticator.java
@@ -30,7 +30,7 @@ public class HadoopDefaultAuthenticator implements HiveAuthenticationProvider {
 
   private String userName;
   private List<String> groupNames;
-  
+
   private Configuration conf;
 
   @Override
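For orientation, the parameterized lookups above are exactly what the new metastore-side code calls. A minimal sketch of resolving the metastore-side plugins (assuming a populated Configuration object named conf; the same helpers serve the client side when passed the HIVE_AUTHORIZATION_MANAGER and HIVE_AUTHENTICATOR_MANAGER keys instead):

    // Sketch: resolve the metastore-side authenticator/authorizer pair.
    HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(
        conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
    HiveAuthorizationProvider authorizer = HiveUtils.getAuthorizeProviderManager(
        conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);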
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java
new file mode 100644
index 0000000..91913d4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HadoopDefaultMetastoreAuthenticator.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+public class HadoopDefaultMetastoreAuthenticator extends HadoopDefaultAuthenticator
+  implements HiveMetastoreAuthenticationProvider {
+
+  @Override
+  public void setMetaStoreHandler(HMSHandler handler) {
+    setConf(handler.getHiveConf());
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
new file mode 100644
index 0000000..a4607dd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+
+/**
+ * HiveMetastoreAuthenticationProvider is an interface for metastore-side
+ * authentication. The implementation should return userNames and groupNames.
+ */
+public interface HiveMetastoreAuthenticationProvider extends HiveAuthenticationProvider {
+
+  /**
+   * Allows the invoker of HiveMetastoreAuthenticationProvider to send in a
+   * hive metastore handler that can be used to provide data for any
+   * authentication that needs to be done.
+   * @param handler
+   */
+  void setMetaStoreHandler(HMSHandler handler);
+
+}
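To illustrate the authentication extension point: a hypothetical custom metastore-side authenticator could extend the Hadoop default and keep the handler around for metastore lookups (the class name, package, and field below are invented for illustration; setMetaStoreHandler and getHiveConf come from the code above):

    package org.example.hive.security; // hypothetical package

    import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
    import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
    import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;

    public class ExampleMetastoreAuthenticator extends HadoopDefaultAuthenticator
        implements HiveMetastoreAuthenticationProvider {

      private HMSHandler handler; // retained for later metastore-side lookups

      @Override
      public void setMetaStoreHandler(HMSHandler handler) {
        this.handler = handler;
        setConf(handler.getHiveConf());
      }
    }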
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
new file mode 100644
index 0000000..29e27c6
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;
+
+public class AuthorizationPreEventListener extends MetaStorePreEventListener {
+
+  public static final Log LOG = LogFactory.getLog(
+      AuthorizationPreEventListener.class);
+
+  private static HiveConf conf;
+  private static boolean doAuth;
+  private static HiveMetastoreAuthorizationProvider authorizer;
+  private static HiveMetastoreAuthenticationProvider authenticator;
+
+  public AuthorizationPreEventListener(Configuration config) throws HiveException {
+    super(config);
+
+    conf = new HiveConf(config, AuthorizationPreEventListener.class);
+
+    doAuth = conf.getBoolVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED);
+
+    if (doAuth) {
+      authenticator = (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(
+          conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
+      authorizer = (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
+          conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);
+    }
+  }
+
+  @Override
+  public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectException,
+      InvalidOperationException {
+    if (!doAuth) {
+      return; // Metastore-side auth was not turned on; we simply return.
+    }
+
+    authenticator.setMetaStoreHandler(context.getHandler());
+    authorizer.setMetaStoreHandler(context.getHandler());
+
+    switch (context.getEventType()) {
+    case CREATE_TABLE:
+      authorizeCreateTable((PreCreateTableEvent) context);
+      break;
+    case DROP_TABLE:
+      authorizeDropTable((PreDropTableEvent) context);
+      break;
+    case ALTER_TABLE:
+      authorizeAlterTable((PreAlterTableEvent) context);
+      break;
+    case ADD_PARTITION:
+      authorizeAddPartition((PreAddPartitionEvent) context);
+      break;
+    case DROP_PARTITION:
+      authorizeDropPartition((PreDropPartitionEvent) context);
+      break;
+    case ALTER_PARTITION:
+      authorizeAlterPartition((PreAlterPartitionEvent) context);
+      break;
+    case CREATE_DATABASE:
+      authorizeCreateDatabase((PreCreateDatabaseEvent) context);
+      break;
+    case DROP_DATABASE:
+      authorizeDropDatabase((PreDropDatabaseEvent) context);
+      break;
+    case LOAD_PARTITION_DONE:
+      // noop for now
+      break;
+    default:
+      break;
+    }
+
+  }
+
+  private void authorizeCreateDatabase(PreCreateDatabaseEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      authorizer.authorize(new Database(context.getDatabase()),
+          HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
+          HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeDropDatabase(PreDropDatabaseEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      authorizer.authorize(new Database(context.getDatabase()),
+          HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
+          HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeCreateTable(PreCreateTableEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      authorizer.authorize(getTableFromApiTable(context.getTable()),
+          HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
+          HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeDropTable(PreDropTableEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      authorizer.authorize(getTableFromApiTable(context.getTable()),
+          HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
+          HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeAlterTable(PreAlterTableEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      authorizer.authorize(getTableFromApiTable(context.getOldTable()),
+          null,
+          new Privilege[]{Privilege.ALTER_METADATA});
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeAddPartition(PreAddPartitionEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition();
+
+      authorizer.authorize(getPartitionFromApiPartition(mapiPart, context),
+          HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
+          HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (NoSuchObjectException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeDropPartition(PreDropPartitionEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition();
+      authorizer.authorize(getPartitionFromApiPartition(mapiPart, context),
+          HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
+          HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (NoSuchObjectException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+  private void authorizeAlterPartition(PreAlterPartitionEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getNewPartition();
+      authorizer.authorize(getPartitionFromApiPartition(mapiPart, context),
+          null,
+          new Privilege[]{Privilege.ALTER_METADATA});
+    } catch (AuthorizationException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (NoSuchObjectException e) {
+      throw new InvalidOperationException(e.getMessage());
+    } catch (HiveException e) {
+      throw new MetaException(e.getMessage());
+    }
+  }
+
+
+  public Table getTableFromApiTable(org.apache.hadoop.hive.metastore.api.Table apiTable) {
+    org.apache.hadoop.hive.metastore.api.Table tTable = apiTable.deepCopy();
+    if (tTable.getTableType() == null) {
+      // TableType specified was null, we need to figure out what type it was.
+      if (MetaStoreUtils.isExternalTable(tTable)) {
+        tTable.setTableType(TableType.EXTERNAL_TABLE.toString());
+      } else if (MetaStoreUtils.isIndexTable(tTable)) {
+        tTable.setTableType(TableType.INDEX_TABLE.toString());
+      } else if ((tTable.getSd() == null) || (tTable.getSd().getLocation() == null)) {
+        tTable.setTableType(TableType.VIRTUAL_VIEW.toString());
+      } else {
+        tTable.setTableType(TableType.MANAGED_TABLE.toString());
+      }
+    }
+    Table tbl = new Table(tTable);
+    return tbl;
+  }
+
+  public Partition getPartitionFromApiPartition(
+      org.apache.hadoop.hive.metastore.api.Partition mapiPart,
+      PreEventContext context) throws HiveException, NoSuchObjectException, MetaException {
+    org.apache.hadoop.hive.metastore.api.Partition tPart = mapiPart.deepCopy();
+    org.apache.hadoop.hive.metastore.api.Table t = context.getHandler().get_table(
+        mapiPart.getDbName(), mapiPart.getTableName());
+    if (tPart.getSd() == null) {
+      tPart.setSd(t.getSd());
+    }
+    return new Partition(getTableFromApiTable(t), tPart);
+  }
+
+
+}
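The tableType back-fill in getTableFromApiTable matters because older clients may create tables without setting the type. A standalone restatement of the rule, for illustration only (this is not Hive code; external/index stand in for MetaStoreUtils.isExternalTable()/isIndexTable()):

    public class TableTypeRuleDemo {
      // Mirrors the inference order used above: external, then index,
      // then view (no storage descriptor or no location), else managed.
      static String inferTableType(boolean external, boolean index,
          boolean hasSd, boolean hasLocation) {
        if (external) {
          return "EXTERNAL_TABLE";
        } else if (index) {
          return "INDEX_TABLE";
        } else if (!hasSd || !hasLocation) {
          return "VIRTUAL_VIEW";
        } else {
          return "MANAGED_TABLE";
        }
      }

      public static void main(String[] args) {
        // A table with a storage descriptor but no location is treated as a view.
        System.out.println(inferTableType(false, false, true, false)); // VIRTUAL_VIEW
      }
    }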
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
new file mode 100644
index 0000000..2f2ceeb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
@@ -0,0 +1,502 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+public abstract class BitSetCheckedAuthorizationProvider extends
+    HiveAuthorizationProviderBase {
+
+  static class BitSetChecker {
+
+    boolean[] inputCheck = null;
+    boolean[] outputCheck = null;
+
+    public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
+        Privilege[] outputRequiredPriv) {
+      BitSetChecker checker = new BitSetChecker();
+      if (inputRequiredPriv != null) {
+        checker.inputCheck = new boolean[inputRequiredPriv.length];
+        for (int i = 0; i < checker.inputCheck.length; i++) {
+          checker.inputCheck[i] = false;
+        }
+      }
+      if (outputRequiredPriv != null) {
+        checker.outputCheck = new boolean[outputRequiredPriv.length];
+        for (int i = 0; i < checker.outputCheck.length; i++) {
+          checker.outputCheck[i] = false;
+        }
+      }
+
+      return checker;
+    }
+
+  }
+
+  @Override
+  public void authorize(Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
+        outputCheck);
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, null, null, null, null);
+  }
+
+  @Override
+  public void authorize(Database db, Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck);
+
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, db.getName(), null, null, null);
+  }
+
+  @Override
+  public void authorize(Table table, Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    authorizeUserDBAndTable(table, inputRequiredPriv,
+        outputRequiredPriv, inputCheck, outputCheck);
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, table.getDbName(), table.getTableName(),
+        null, null);
+  }
+
+  @Override
+  public void authorize(Partition part, Privilege[] inputRequiredPriv,
+      Privilege[] outputRequiredPriv) throws HiveException {
+
+    // if the partition does not have partition level privilege, go to table level.
+    Table table = part.getTable();
+    if (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") == null || ("FALSE"
+        .equalsIgnoreCase(table.getParameters().get(
+            "PARTITION_LEVEL_PRIVILEGE")))) {
+      this.authorize(part.getTable(), inputRequiredPriv, outputRequiredPriv);
+      return;
+    }
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    if (authorizeUserDbAndPartition(part, inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck)) {
+      return;
+    }
+
+    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
+        inputCheck, outputCheck, part.getTable().getDbName(), part
+            .getTable().getTableName(), part.getName(), null);
+  }
+
+  @Override
+  public void authorize(Table table, Partition part, List<String> columns,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
+      throws HiveException {
+
+    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
+        outputRequiredPriv);
+    boolean[] inputCheck = checker.inputCheck;
+    boolean[] outputCheck = checker.outputCheck;
+
+    String partName = null;
+    List<String> partValues = null;
+    if (part != null
+        && (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
+            .equalsIgnoreCase(table.getParameters().get(
+                "PARTITION_LEVEL_PRIVILEGE"))))) {
+      partName = part.getName();
+      partValues = part.getValues();
+    }
+
+    if (partValues == null) {
+      if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv,
+          inputCheck, outputCheck)) {
+        return;
+      }
+    } else {
+      if (authorizeUserDbAndPartition(part, inputRequiredPriv,
+          outputRequiredPriv, inputCheck, outputCheck)) {
+        return;
+      }
+    }
+
+    for (String col : columns) {
+
+      BitSetChecker checker2 = BitSetChecker.getBitSetChecker(
+          inputRequiredPriv, outputRequiredPriv);
+      boolean[] inputCheck2 = checker2.inputCheck;
+      boolean[] outputCheck2 = checker2.outputCheck;
+
+      PrincipalPrivilegeSet partColumnPrivileges = hive_db
+          .get_privilege_set(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
+              partValues, col, this.getAuthenticator().getUserName(), this
+                  .getAuthenticator().getGroupNames());
+
+      authorizePrivileges(partColumnPrivileges, inputRequiredPriv, inputCheck2,
+          outputRequiredPriv, outputCheck2);
+
+      if (inputCheck2 != null) {
+        booleanArrayOr(inputCheck2, inputCheck);
+      }
+      if (outputCheck2 != null) {
+        booleanArrayOr(outputCheck2, outputCheck);
+      }
+
+      checkAndThrowAuthorizationException(inputRequiredPriv,
+          outputRequiredPriv, inputCheck2, outputCheck2, table.getDbName(),
+          table.getTableName(), partName, col);
+    }
+  }
+
+  protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv,
+      boolean[] inputCheck, Privilege[] outputRequiredPriv,
+      boolean[] outputCheck) throws HiveException {
+    PrincipalPrivilegeSet privileges = hive_db.get_privilege_set(
+        HiveObjectType.GLOBAL, null, null, null, null, this.getAuthenticator()
+            .getUserName(), this.getAuthenticator().getGroupNames());
+    return authorizePrivileges(privileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck);
+  }
+
+  /**
+   * Check privileges on User and DB. This is used before doing a check on
+   * table/partition objects: first check the user and DB privileges; if that
+   * check passes, there is no need to check against the table/partition hive
+   * object.
+   *
+   * @param db
+   * @param inputRequiredPriv
+   * @param outputRequiredPriv
+   * @param inputCheck
+   * @param outputCheck
+   * @return true if the check on user and DB privilege passed, which means no
+   *         need for privilege check on concrete hive objects.
+   * @throws HiveException
+   */
+  private boolean authorizeUserAndDBPriv(Database db,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+    if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
+        outputCheck)) {
+      return true;
+    }
+
+    PrincipalPrivilegeSet dbPrivileges = hive_db.get_privilege_set(
+        HiveObjectType.DATABASE, db.getName(), null, null, null, this
+            .getAuthenticator().getUserName(), this.getAuthenticator()
+            .getGroupNames());
+
+    if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck)) {
+      return true;
+    }
+
+    return false;
+  }
+
+  /**
+   * Check privileges on User, DB and table objects.
+   *
+   * @param table
+   * @param inputRequiredPriv
+   * @param outputRequiredPriv
+   * @param inputCheck
+   * @param outputCheck
+   * @return true if the check passed
+   * @throws HiveException
+   */
+  private boolean authorizeUserDBAndTable(Table table,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+
+    if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()),
+        inputRequiredPriv, outputRequiredPriv, inputCheck, outputCheck)) {
+      return true;
+    }
+
+    PrincipalPrivilegeSet tablePrivileges = hive_db.get_privilege_set(
+        HiveObjectType.TABLE, table.getDbName(), table.getTableName(), null,
+        null, this.getAuthenticator().getUserName(), this.getAuthenticator()
+            .getGroupNames());
+
+    if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck)) {
+      return true;
+    }
+
+    return false;
+  }
+
+  /**
+   * Check privileges on User, DB and table/Partition objects.
+   *
+   * @param part
+   * @param inputRequiredPriv
+   * @param outputRequiredPriv
+   * @param inputCheck
+   * @param outputCheck
+   * @return true if the check passed
+   * @throws HiveException
+   */
+  private boolean authorizeUserDbAndPartition(Partition part,
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
+
+    if (authorizeUserAndDBPriv(
+        hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv,
+        outputRequiredPriv, inputCheck, outputCheck)) {
+      return true;
+    }
+
+    PrincipalPrivilegeSet partPrivileges = part.getTPartition().getPrivileges();
+    if (partPrivileges == null) {
+      partPrivileges = hive_db.get_privilege_set(HiveObjectType.PARTITION, part
+          .getTable().getDbName(), part.getTable().getTableName(), part
+          .getValues(), null, this.getAuthenticator().getUserName(), this
+          .getAuthenticator().getGroupNames());
+    }
+
+    if (authorizePrivileges(partPrivileges, inputRequiredPriv, inputCheck,
+        outputRequiredPriv, outputCheck)) {
+      return true;
+    }
+
+    return false;
+  }
+
+  protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges,
+      Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv,
+      boolean[] outputCheck) throws HiveException {
+
+    boolean pass = true;
+    if (inputPriv != null) {
+      pass = pass && matchPrivs(inputPriv, privileges, inputCheck);
+    }
+    if (outputPriv != null) {
+      pass = pass && matchPrivs(outputPriv, privileges, outputCheck);
+    }
+    return pass;
+  }
+
+  /**
+   * Try to match an array of required privileges against the grants made to
+   * the user, the user's groups, and the user's roles.
+   *
+   * @param inputPriv the required privileges
+   * @param privileges the grants available at this scope
+   * @param check accumulating flags, one per required privilege
+   */
+  private boolean matchPrivs(Privilege[] inputPriv,
+      PrincipalPrivilegeSet privileges, boolean[] check) {
+
+    if (inputPriv == null) {
+      return true;
+    }
+
+    if (privileges == null) {
+      return false;
+    }
+
+    /*
+     * user grants
+     */
+    Set<String> privSet = new HashSet<String>();
+    if (privileges.getUserPrivileges() != null
+        && privileges.getUserPrivileges().size() > 0) {
+      Collection<List<PrivilegeGrantInfo>> privCollection = privileges.getUserPrivileges().values();
+
+      List<String> userPrivs = getPrivilegeStringList(privCollection);
+      if (userPrivs != null && userPrivs.size() > 0) {
+        for (String priv : userPrivs) {
+          if (priv == null || priv.trim().equals("")) {
+            continue;
+          }
+          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+            setBooleanArray(check, true);
+            return true;
+          }
+          privSet.add(priv.toLowerCase());
+        }
+      }
+    }
+
+    /*
+     * group grants
+     */
+    if (privileges.getGroupPrivileges() != null
+        && privileges.getGroupPrivileges().size() > 0) {
+      Collection<List<PrivilegeGrantInfo>> groupPrivCollection = privileges
+          .getGroupPrivileges().values();
+      List<String> groupPrivs = getPrivilegeStringList(groupPrivCollection);
+      if (groupPrivs != null && groupPrivs.size() > 0) {
+        for (String priv : groupPrivs) {
+          if (priv == null || priv.trim().equals("")) {
+            continue;
+          }
+          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+            setBooleanArray(check, true);
+            return true;
+          }
+          privSet.add(priv.toLowerCase());
+        }
+      }
+    }
+
+    /*
+     * roles grants
+     */
+    if (privileges.getRolePrivileges() != null
+        && privileges.getRolePrivileges().size() > 0) {
+      Collection<List<PrivilegeGrantInfo>> rolePrivsCollection = privileges
+          .getRolePrivileges().values();
+      List<String> rolePrivs = getPrivilegeStringList(rolePrivsCollection);
+      if (rolePrivs != null && rolePrivs.size() > 0) {
+        for (String priv : rolePrivs) {
+          if (priv == null || priv.trim().equals("")) {
+            continue;
+          }
+          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
+            setBooleanArray(check, true);
+            return true;
+          }
+          privSet.add(priv.toLowerCase());
+        }
+      }
+    }
+
+    for (int i = 0; i < inputPriv.length; i++) {
+      String toMatch = inputPriv[i].toString();
+      if (!check[i]) {
+        check[i] = privSet.contains(toMatch.toLowerCase());
+      }
+    }
+
+    return firstFalseIndex(check) < 0;
+  }
+
+  private List<String> getPrivilegeStringList(
+      Collection<List<PrivilegeGrantInfo>> privCollection) {
+    List<String> userPrivs = new ArrayList<String>();
+    if (privCollection != null && privCollection.size() > 0) {
+      for (List<PrivilegeGrantInfo> grantList : privCollection) {
+        if (grantList == null) {
+          continue;
+        }
+        for (int i = 0; i < grantList.size(); i++) {
+          PrivilegeGrantInfo grant = grantList.get(i);
+          userPrivs.add(grant.getPrivilege());
+        }
+      }
+    }
+    return userPrivs;
+  }
+
+  private static void setBooleanArray(boolean[] check, boolean b) {
+    for (int i = 0; i < check.length; i++) {
+      check[i] = b;
+    }
+  }
+
+  private static void booleanArrayOr(boolean[] output, boolean[] input) {
+    for (int i = 0; i < output.length && i < input.length; i++) {
+      output[i] = output[i] || input[i];
+    }
+  }
+
+  private void checkAndThrowAuthorizationException(
+      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      boolean[] inputCheck, boolean[] outputCheck, String dbName,
+      String tableName, String partitionName, String columnName) {
+
+    String hiveObject = "{ ";
+    if (dbName != null) {
+      hiveObject = hiveObject + "database:" + dbName;
+    }
+    if (tableName != null) {
+      hiveObject = hiveObject + ", table:" + tableName;
+    }
+    if (partitionName != null) {
+      hiveObject = hiveObject + ", partitionName:" + partitionName;
+    }
+    if (columnName != null) {
+      hiveObject = hiveObject + ", columnName:" + columnName;
+    }
+    hiveObject = hiveObject + "}";
+
+    if (inputCheck != null) {
+      int input = this.firstFalseIndex(inputCheck);
+      if (input >= 0) {
+        throw new AuthorizationException("No privilege '"
+            + inputRequiredPriv[input].toString() + "' found for inputs "
+            + hiveObject);
+      }
+    }
+
+    if (outputCheck != null) {
+      int output = this.firstFalseIndex(outputCheck);
+      if (output >= 0) {
+        throw new AuthorizationException("No privilege '"
+            + outputRequiredPriv[output].toString() + "' found for outputs "
+            + hiveObject);
+      }
+    }
+  }
+
+  private int firstFalseIndex(boolean[] inputCheck) {
+    if (inputCheck != null) {
+      for (int i = 0; i < inputCheck.length; i++) {
+        if (!inputCheck[i]) {
+          return i;
+        }
+      }
+    }
+    return -1;
+  }
+}
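The accumulation contract above is easiest to see in isolation. A self-contained toy model (not Hive code) of the matchPrivs-style check, in which an "all" grant short-circuits and per-scope results OR together across user, DB, table, and partition scopes:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    // Toy model: required[i] stays satisfied once any scope grants it.
    public class BitsetDemo {
      static boolean matchPrivs(String[] required, Set<String> granted, boolean[] check) {
        if (granted.contains("all")) {
          Arrays.fill(check, true); // an ALL grant satisfies everything
          return true;
        }
        for (int i = 0; i < required.length; i++) {
          if (!check[i]) {
            check[i] = granted.contains(required[i]);
          }
        }
        for (boolean b : check) {
          if (!b) {
            return false;
          }
        }
        return true;
      }

      public static void main(String[] args) {
        String[] required = {"select", "drop"};
        boolean[] check = new boolean[required.length];
        // user-level scope grants only SELECT: not yet authorized
        matchPrivs(required, new HashSet<String>(Arrays.asList("select")), check);
        // db-level scope grants DROP: the earlier SELECT result is retained
        System.out.println(matchPrivs(required,
            new HashSet<String>(Arrays.asList("drop")), check)); // prints true
      }
    }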
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
index 66c8cd7..2fa512c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveAuthorizationProvider.java
@@ -18,478 +18,16 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.HiveObjectType;
-import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
 
 public class DefaultHiveAuthorizationProvider extends
-    HiveAuthorizationProviderBase {
-
-  static class BitSetChecker {
-
-    boolean[] inputCheck = null;
-    boolean[] outputCheck = null;
-
-    public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
-        Privilege[] outputRequiredPriv) {
-      BitSetChecker checker = new BitSetChecker();
-      if (inputRequiredPriv != null) {
-        checker.inputCheck = new boolean[inputRequiredPriv.length];
-        for (int i = 0; i < checker.inputCheck.length; i++) {
-          checker.inputCheck[i] = false;
-        }
-      }
-      if (outputRequiredPriv != null) {
-        checker.outputCheck = new boolean[outputRequiredPriv.length];
-        for (int i = 0; i < checker.outputCheck.length; i++) {
-          checker.outputCheck[i] = false;
-        }
-      }
-
-      return checker;
-    }
-
-  }
-
-  @Override
-  public void authorize(Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
-
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
-        outputCheck);
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, null, null, null, null);
-  }
-
-  @Override
-  public void authorize(Database db, Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
-
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck);
-
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, db.getName(), null, null, null);
-  }
-
-  @Override
-  public void authorize(Table table, Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException {
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    authorizeUserDBAndTable(table, inputRequiredPriv,
-        outputRequiredPriv, inputCheck, outputCheck);
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, table.getDbName(), table.getTableName(),
-        null, null);
-  }
-
-  @Override
-  public void authorize(Partition part, Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException {
-
-    // if the partition does not have partition level privilege, go to table level.
-    Table table = part.getTable();
-    if (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") == null || ("FALSE"
-        .equalsIgnoreCase(table.getParameters().get(
-            "PARTITION_LEVEL_PRIVILEGE")))) {
-      this.authorize(part.getTable(), inputRequiredPriv, outputRequiredPriv);
-      return;
-    }
-
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    if (authorizeUserDbAndPartition(part, inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck)) {
-      return;
-    }
-
-    checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
-        inputCheck, outputCheck, part.getTable().getDbName(), part
-            .getTable().getTableName(), part.getName(), null);
-  }
-
-  @Override
-  public void authorize(Table table, Partition part, List<String> columns,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
-      throws HiveException {
-    BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
-        outputRequiredPriv);
-    boolean[] inputCheck = checker.inputCheck;
-    boolean[] outputCheck = checker.outputCheck;
-
-    String partName = null;
-    List<String> partValues = null;
-    if (part != null
-        && (table.getParameters().get("PARTITION_LEVEL_PRIVILEGE") != null && ("TRUE"
-            .equalsIgnoreCase(table.getParameters().get(
-                "PARTITION_LEVEL_PRIVILEGE"))))) {
-      partName = part.getName();
-      partValues = part.getValues();
-    }
-
-    if (partValues == null) {
-      if (authorizeUserDBAndTable(table, inputRequiredPriv, outputRequiredPriv,
-          inputCheck, outputCheck)) {
-        return;
-      }
-    } else {
-      if (authorizeUserDbAndPartition(part, inputRequiredPriv,
-          outputRequiredPriv, inputCheck, outputCheck)) {
-        return;
-      }
-    }
-
-    for (String col : columns) {
-
-      BitSetChecker checker2 = BitSetChecker.getBitSetChecker(
-          inputRequiredPriv, outputRequiredPriv);
-      boolean[] inputCheck2 = checker2.inputCheck;
-      boolean[] outputCheck2 = checker2.outputCheck;
-
-      PrincipalPrivilegeSet partColumnPrivileges = hive_db
-          .get_privilege_set(HiveObjectType.COLUMN, table.getDbName(), table.getTableName(),
-              partValues, col, this.getAuthenticator().getUserName(), this
-                  .getAuthenticator().getGroupNames());
-
-      authorizePrivileges(partColumnPrivileges, inputRequiredPriv, inputCheck2,
-          outputRequiredPriv, outputCheck2);
-
-      if (inputCheck2 != null) {
-        booleanArrayOr(inputCheck2, inputCheck);
-      }
-      if (outputCheck2 != null) {
-        booleanArrayOr(inputCheck2, inputCheck);
-      }
-
-      checkAndThrowAuthorizationException(inputRequiredPriv,
-          outputRequiredPriv, inputCheck2, outputCheck2, table.getDbName(),
-          table.getTableName(), partName, col);
-    }
-  }
-
-  protected boolean authorizeUserPriv(Privilege[] inputRequiredPriv,
-      boolean[] inputCheck, Privilege[] outputRequiredPriv,
-      boolean[] outputCheck) throws HiveException {
-    PrincipalPrivilegeSet privileges = hive_db.get_privilege_set(
-        HiveObjectType.GLOBAL, null, null, null, null, this.getAuthenticator()
-            .getUserName(), this.getAuthenticator().getGroupNames());
-    return authorizePrivileges(privileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck);
-  }
-
-  /**
-   * Check privileges on User and DB. This is used before doing a check on
-   * table/partition objects, first check the user and DB privileges. If it
-   * passed on this check, no need to check against the table/partition hive
-   * object.
-   *
-   * @param db
-   * @param inputRequiredPriv
-   * @param outputRequiredPriv
-   * @param inputCheck
-   * @param outputCheck
-   * @return true if the check on user and DB privilege passed, which means no
-   *         need for privilege check on concrete hive objects.
-   * @throws HiveException
-   */
-  private boolean authorizeUserAndDBPriv(Database db,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-    if (authorizeUserPriv(inputRequiredPriv, inputCheck, outputRequiredPriv,
-        outputCheck)) {
-      return true;
-    }
-
-    PrincipalPrivilegeSet dbPrivileges = hive_db.get_privilege_set(
-        HiveObjectType.DATABASE, db.getName(), null, null, null, this
-            .getAuthenticator().getUserName(), this.getAuthenticator()
-            .getGroupNames());
-
-    if (authorizePrivileges(dbPrivileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck)) {
-      return true;
-    }
-
-    return false;
-  }
-
-  /**
-   * Check privileges on User, DB and table objects.
-   *
-   * @param table
-   * @param inputRequiredPriv
-   * @param outputRequiredPriv
-   * @param inputCheck
-   * @param outputCheck
-   * @return true if the check passed
-   * @throws HiveException
-   */
-  private boolean authorizeUserDBAndTable(Table table,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-
-    if (authorizeUserAndDBPriv(hive_db.getDatabase(table.getDbName()),
-        inputRequiredPriv, outputRequiredPriv, inputCheck, outputCheck)) {
-      return true;
-    }
-
-    PrincipalPrivilegeSet tablePrivileges = hive_db.get_privilege_set(
-        HiveObjectType.TABLE, table.getDbName(), table.getTableName(), null,
-        null, this.getAuthenticator().getUserName(), this.getAuthenticator()
-            .getGroupNames());
-
-    if (authorizePrivileges(tablePrivileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck)) {
-      return true;
-    }
-
-    return false;
-  }
-
-  /**
-   * Check privileges on User, DB and table/Partition objects.
-   *
-   * @param part
-   * @param inputRequiredPriv
-   * @param outputRequiredPriv
-   * @param inputCheck
-   * @param outputCheck
-   * @return true if the check passed
-   * @throws HiveException
-   */
-  private boolean authorizeUserDbAndPartition(Partition part,
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck) throws HiveException {
-
-    if (authorizeUserAndDBPriv(
-        hive_db.getDatabase(part.getTable().getDbName()), inputRequiredPriv,
-        outputRequiredPriv, inputCheck, outputCheck)) {
-      return true;
-    }
-
-    PrincipalPrivilegeSet partPrivileges = part.getTPartition().getPrivileges();
-    if (partPrivileges == null) {
-      partPrivileges = hive_db.get_privilege_set(HiveObjectType.PARTITION, part
-          .getTable().getDbName(), part.getTable().getTableName(), part
-          .getValues(), null, this.getAuthenticator().getUserName(), this
-          .getAuthenticator().getGroupNames());
-    }
+    BitSetCheckedAuthorizationProvider {
 
-    if (authorizePrivileges(partPrivileges, inputRequiredPriv, inputCheck,
-        outputRequiredPriv, outputCheck)) {
-      return true;
-    }
-
-    return false;
-  }
-
-  protected boolean authorizePrivileges(PrincipalPrivilegeSet privileges,
-      Privilege[] inputPriv, boolean[] inputCheck, Privilege[] outputPriv,
-      boolean[] outputCheck) throws HiveException {
-
-    boolean pass = true;
-    if (inputPriv != null) {
-      pass = pass && matchPrivs(inputPriv, privileges, inputCheck);
-    }
-    if (outputPriv != null) {
-      pass = pass && matchPrivs(outputPriv, privileges, outputCheck);
-    }
-    return pass;
+  public void init(Configuration conf) throws HiveException {
+    hive_db = new HiveProxy(Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class)));
   }
 
-  /**
-   * try to match an array of privileges from user/groups/roles grants.
-   *
-   * @param container
-   */
-  private boolean matchPrivs(Privilege[] inputPriv,
-      PrincipalPrivilegeSet privileges, boolean[] check) {
-
-    if (inputPriv == null)
-      return true;
-
-    if (privileges == null)
-      return false;
-
-    /*
-     * user grants
-     */
-    Set<String> privSet = new HashSet<String>();
-    if (privileges.getUserPrivileges() != null
-        && privileges.getUserPrivileges().size() > 0) {
-      Collection<List<PrivilegeGrantInfo>> privCollection = privileges.getUserPrivileges().values();
-
-      List<String> userPrivs = getPrivilegeStringList(privCollection);
-      if (userPrivs != null && userPrivs.size() > 0) {
-        for (String priv : userPrivs) {
-          if (priv == null || priv.trim().equals(""))
-            continue;
-          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
-            setBooleanArray(check, true);
-            return true;
-          }
-          privSet.add(priv.toLowerCase());
-        }
-      }
-    }
-
-    /*
-     * group grants
-     */
-    if (privileges.getGroupPrivileges() != null
-        && privileges.getGroupPrivileges().size() > 0) {
-      Collection<List<PrivilegeGrantInfo>> groupPrivCollection = privileges
-          .getGroupPrivileges().values();
-      List<String> groupPrivs = getPrivilegeStringList(groupPrivCollection);
-      if (groupPrivs != null && groupPrivs.size() > 0) {
-        for (String priv : groupPrivs) {
-          if (priv == null || priv.trim().equals(""))
-            continue;
-          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
-            setBooleanArray(check, true);
-            return true;
-          }
-          privSet.add(priv.toLowerCase());
-        }
-      }
-    }
-
-    /*
-     * roles grants
-     */
-    if (privileges.getRolePrivileges() != null
-        && privileges.getRolePrivileges().size() > 0) {
-      Collection<List<PrivilegeGrantInfo>> rolePrivsCollection = privileges
-          .getRolePrivileges().values();
-      ;
-      List<String> rolePrivs = getPrivilegeStringList(rolePrivsCollection);
-      if (rolePrivs != null && rolePrivs.size() > 0) {
-        for (String priv : rolePrivs) {
-          if (priv == null || priv.trim().equals(""))
-            continue;
-          if (priv.equalsIgnoreCase(Privilege.ALL.toString())) {
-            setBooleanArray(check, true);
-            return true;
-          }
-          privSet.add(priv.toLowerCase());
-        }
-      }
-    }
-
-    for (int i = 0; i < inputPriv.length; i++) {
-      String toMatch = inputPriv[i].toString();
-      if (!check[i]) {
-        check[i] = privSet.contains(toMatch.toLowerCase());
-      }
-    }
-
-    return firstFalseIndex(check) <0;
-  }
-
-  private List<String> getPrivilegeStringList(
-      Collection<List<PrivilegeGrantInfo>> privCollection) {
-    List<String> userPrivs = new ArrayList<String>();
-    if (privCollection!= null && privCollection.size()>0) {
-      for (List<PrivilegeGrantInfo> grantList : privCollection) {
-        if (grantList == null){
-          continue;
-        }
-        for (int i = 0; i < grantList.size(); i++) {
-          PrivilegeGrantInfo grant = grantList.get(i);
-          userPrivs.add(grant.getPrivilege());
-        }
-      }
-    }
-    return userPrivs;
-  }
-
-  private static void setBooleanArray(boolean[] check, boolean b) {
-    for (int i = 0; i < check.length; i++) {
-      check[i] = b;
-    }
-  }
-
-  private static void booleanArrayOr(boolean[] output, boolean[] input) {
-    for (int i = 0; i < output.length && i < input.length; i++) {
-      output[i] = output[i] || input[i];
-    }
-  }
-
-  private void checkAndThrowAuthorizationException(
-      Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
-      boolean[] inputCheck, boolean[] outputCheck,String dbName,
-      String tableName, String partitionName, String columnName) {
-
-    String hiveObject = "{ ";
-    if (dbName != null) {
-      hiveObject = hiveObject + "database:" + dbName;
-    }
-    if (tableName != null) {
-      hiveObject = hiveObject + ", table:" + tableName;
-    }
-    if (partitionName != null) {
-      hiveObject = hiveObject + ", partitionName:" + partitionName;
-    }
-    if (columnName != null) {
-      hiveObject = hiveObject + ", columnName:" + columnName;
-    }
-    hiveObject = hiveObject + "}";
-
-    if (inputCheck != null) {
-      int input = this.firstFalseIndex(inputCheck);
-      if (input >= 0) {
-        throw new AuthorizationException("No privilege '"
-            + inputRequiredPriv[input].toString() + "' found for inputs "
-            + hiveObject);
-      }
-    }
-
-    if (outputCheck != null) {
-      int output = this.firstFalseIndex(outputCheck);
-      if (output >= 0) {
-        throw new AuthorizationException("No privilege '"
-            + outputRequiredPriv[output].toString() + "' found for outputs "
-            + hiveObject);
-      }
-    }
-  }
-
-  private int firstFalseIndex(boolean[] inputCheck) {
-    if (inputCheck != null) {
-      for (int i = 0; i < inputCheck.length; i++) {
-        if (!inputCheck[i]) {
-          return i;
-        }
-      }
-    }
-    return -1;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
new file mode 100644
index 0000000..6685645
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+public class DefaultHiveMetastoreAuthorizationProvider extends BitSetCheckedAuthorizationProvider
+    implements HiveMetastoreAuthorizationProvider {
+
+  @Override
+  public void init(Configuration conf) throws HiveException {
+    hive_db = new HiveProxy();
+  }
+
+  @Override
+  public void setMetaStoreHandler(HMSHandler handler) {
+    hive_db.setHandler(handler);
+  }
+
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
index ddb4231..795064a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
@@ -18,21 +18,90 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.thrift.TException;
 
 public abstract class HiveAuthorizationProviderBase implements
     HiveAuthorizationProvider {
-  
+
+  protected class HiveProxy {
+
+    private final Hive hiveClient;
+    private HMSHandler handler;
+
+    public HiveProxy(Hive hive) {
+      this.hiveClient = hive;
+      this.handler = null;
+    }
+
+    public HiveProxy() {
+      this.hiveClient = null;
+      this.handler = null;
+    }
+
+    public void setHandler(HMSHandler handler) {
+      this.handler = handler;
+    }
+
+    public PrincipalPrivilegeSet get_privilege_set(HiveObjectType column, String dbName,
+        String tableName, List<String> partValues, String col, String userName,
+        List<String> groupNames) throws HiveException {
+      if (hiveClient != null) {
+        return hiveClient.get_privilege_set(
+            column, dbName, tableName, partValues, col, userName, groupNames);
+      } else {
+        HiveObjectRef hiveObj = new HiveObjectRef(column, dbName,
+            tableName, partValues, col);
+        try {
+          return handler.get_privilege_set(hiveObj, userName, groupNames);
+        } catch (MetaException e) {
+          throw new HiveException(e);
+        } catch (TException e) {
+          throw new HiveException(e);
+        }
+      }
+    }
+
+    public Database getDatabase(String dbName) throws HiveException {
+      if (hiveClient != null) {
+        return hiveClient.getDatabase(dbName);
+      } else {
+        try {
+          return handler.get_database(dbName);
+        } catch (NoSuchObjectException e) {
+          throw new HiveException(e);
+        } catch (MetaException e) {
+          throw new HiveException(e);
+        }
+      }
+    }
+
+  }
+
+  protected HiveProxy hive_db;
+
   protected HiveAuthenticationProvider authenticator;
 
-  protected Hive hive_db;
-
   private Configuration conf;
 
+  public static final Log LOG = LogFactory.getLog(
+      HiveAuthenticationProvider.class);
+
+
   public void setConf(Configuration conf) {
     this.conf = conf;
     try {
@@ -42,10 +111,6 @@ public abstract class HiveAuthorizationProviderBase implements
     }
   }
 
-  public void init(Configuration conf) throws HiveException {
-    hive_db = Hive.get(new HiveConf(conf, HiveAuthorizationProvider.class));
-  }
-
   public Configuration getConf() {
     return this.conf;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
new file mode 100644
index 0000000..6a4fffb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+/**
+ * HiveMetastoreAuthorizationProvider : an extension of HiveAuthorizationProvider
+ * that is intended to be called from the metastore side. It will be invoked
+ * by the AuthorizationPreEventListener.
+ *
+ */
+public interface HiveMetastoreAuthorizationProvider extends HiveAuthorizationProvider {
+
+  /**
+   * Allows the invoker of HiveMetastoreAuthorizationProvider to send in a
+   * hive metastore handler that can be used to make calls to test
+   * whether or not authorizations can/will succeed. Intended to be called
+   * before any of the authorize methods are called.
+   * @param handler
+   */
+  void setMetaStoreHandler(HMSHandler handler);
+
+}
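As a sketch of this extension point: a hypothetical provider that layers an extra rule on top of the grant-based default might subclass DefaultHiveMetastoreAuthorizationProvider and override one overload. The class name, package, database name, and policy below are invented for illustration; only the interfaces and signatures come from this patch:

    package org.example.hive.security; // hypothetical package

    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider;
    import org.apache.hadoop.hive.ql.security.authorization.Privilege;

    public class ProtectedDbAuthorizationProvider
        extends DefaultHiveMetastoreAuthorizationProvider {

      @Override
      public void authorize(Database db, Privilege[] readRequiredPriv,
          Privilege[] writeRequiredPriv) throws HiveException, AuthorizationException {
        // Hypothetical policy: no writes to the "prod" database, for anyone.
        if ("prod".equalsIgnoreCase(db.getName()) && writeRequiredPriv != null
            && writeRequiredPriv.length > 0) {
          throw new AuthorizationException("database prod is read-only");
        }
        // Otherwise, fall back to the grant-based bitset checks.
        super.authorize(db, readRequiredPriv, writeRequiredPriv);
      }
    }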
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security.authorization; + +import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.security.AccessControlException; +import java.util.EnumSet; +import java.util.List; + +import javax.security.auth.login.LoginException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsAction; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.Warehouse; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; + +public class StorageBasedAuthorizationProvider extends HiveAuthorizationProviderBase + implements HiveMetastoreAuthorizationProvider { + + private static final String DATABASE_WAREHOUSE_SUFFIX = ".db"; + private Warehouse wh; + + @Override + public void init(Configuration conf) throws HiveException { + hive_db = new HiveProxy(); + } + + @Override + public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + // Currently not used in the Hive code base, but intended to authorize actions + // that are directly user-level. As there is no storage-based aspect to this, + // we can follow one of two routes: + // a) We can allow by default - that way, this call stays out of the way + // b) We can deny by default - that way, no privilege that is not understood + // and explicitly allowed is ever authorized. + // Both approaches have merit, but given that user-level operations such as + // grants and revokes do not make sense in the context of storage-permission + // based auth, denying is the more canonical choice here.
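+ // As a result, user-level calls such as grant and revoke are always rejected + // by this provider and would need to be authorized elsewhere (for example, by + // a client-side provider) if a deployment requires them.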
+ + throw new AuthorizationException(StorageBasedAuthorizationProvider.class.getName() + + " does not allow user-level authorization"); + } + + @Override + public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + if (db == null) { + return; + } + + Path path = getDbLocation(db); + authorize(path, readRequiredPriv, writeRequiredPriv); + } + + @Override + public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + + if (table == null) { + return; + } + + Path path = null; + try { + if (table.getTTable().getSd().getLocation() == null + || table.getTTable().getSd().getLocation().isEmpty()) { + path = wh.getTablePath(hive_db.getDatabase(table.getDbName()), table.getTableName()); + } else { + path = table.getPath(); + } + } catch (MetaException ex) { + throw new HiveException(ex); + } + + authorize(path, readRequiredPriv, writeRequiredPriv); + } + + @Override + public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + if (part == null) { + return; + } + authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv); + } + + public void authorize(Table table, Partition part, Privilege[] readRequiredPriv, + Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + + if (part == null || part.getLocation() == null) { + authorize(table, readRequiredPriv, writeRequiredPriv); + } else { + authorize(part.getPartitionPath(), readRequiredPriv, writeRequiredPriv); + } + } + + @Override + public void authorize(Table table, Partition part, List<String> columns, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, + AuthorizationException { + // In simple storage-based auth, we have no information about columns + // living in different files, so we do simple partition-level auth and + // ignore the columns parameter. + + if (part == null) { + return; + } + + authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv); + } + + @Override + public void setMetaStoreHandler(HMSHandler handler) { + hive_db.setHandler(handler); + this.wh = handler.getWh(); + } + + /** + * Given a privilege, return the FsAction it requires. + */ + protected FsAction getFsAction(Privilege priv) { + + switch (priv.getPriv()) { + case ALL: + return FsAction.READ_WRITE; + case ALTER_DATA: + return FsAction.WRITE; + case ALTER_METADATA: + return FsAction.WRITE; + case CREATE: + return FsAction.WRITE; + case DROP: + return FsAction.WRITE; + case INDEX: + return FsAction.WRITE; + case LOCK: + return FsAction.WRITE; + case SELECT: + return FsAction.READ; + case SHOW_DATABASE: + return FsAction.READ; + case UNKNOWN: + default: + throw new AuthorizationException("Unknown privilege"); + } + } + + /** + * Given a Privilege[], find out all the FsActions that are required. + */ + protected EnumSet<FsAction> getFsActions(Privilege[] privs) { + EnumSet<FsAction> actions = EnumSet.noneOf(FsAction.class); + + if (privs == null) { + return actions; + } + + for (Privilege priv : privs) { + actions.add(getFsAction(priv)); + } + + return actions; + } + + /** + * Authorize privileges against a path. + * + * @param path + * a filesystem path + * @param readRequiredPriv + * a list of privileges needed for inputs. + * @param writeRequiredPriv + * a list of privileges needed for outputs.
+ */ + public void authorize(Path path, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + try { + EnumSet<FsAction> actions = getFsActions(readRequiredPriv); + actions.addAll(getFsActions(writeRequiredPriv)); + if (actions.isEmpty()) { + return; + } + + checkPermissions(getConf(), path, actions); + + } catch (AccessControlException ex) { + throw new AuthorizationException(ex); + } catch (LoginException ex) { + throw new AuthorizationException(ex); + } catch (IOException ex) { + throw new HiveException(ex); + } + } + + + /** + * Checks the permissions for the given path and current user on Hadoop FS. + * If the given path does not exist, it checks for its parent folder. + */ + protected void checkPermissions(final Configuration conf, final Path path, + final EnumSet<FsAction> actions) throws IOException, LoginException { + + if (path == null) { + throw new IllegalArgumentException("path is null"); + } + + final FileSystem fs = path.getFileSystem(conf); + + if (fs.exists(path)) { + checkPermissions(fs, path, actions, + authenticator.getUserName(), authenticator.getGroupNames()); + } else if (path.getParent() != null) { + // find the closest ancestor that exists and check its permissions + Path par = path.getParent(); + while (par != null) { + if (fs.exists(par)) { + break; + } + par = par.getParent(); + } + + checkPermissions(fs, par, actions, + authenticator.getUserName(), authenticator.getGroupNames()); + } + } + + /** + * Checks the permissions for the given path and current user on Hadoop FS. If the given path + * does not exist, it returns. + */ + @SuppressWarnings("deprecation") + protected static void checkPermissions(final FileSystem fs, final Path path, + final EnumSet<FsAction> actions, String user, List<String> groups) throws IOException, + AccessControlException { + + final FileStatus stat; + + try { + stat = fs.getFileStatus(path); + } catch (FileNotFoundException fnfe) { + // File named by path doesn't exist; nothing to validate. + return; + } catch (org.apache.hadoop.fs.permission.AccessControlException ace) { + // Older Hadoop versions throw this @deprecated exception.
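+ // Re-wrap it as java.security.AccessControlException so that callers only + // need to handle a single exception type.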
+ throw new AccessControlException(ace.getMessage()); + } + + final FsPermission dirPerms = stat.getPermission(); + final String grp = stat.getGroup(); + + for (FsAction action : actions) { + if (user.equals(stat.getOwner())) { + if (dirPerms.getUserAction().implies(action)) { + continue; + } + } + if (groups.contains(grp)) { + if (dirPerms.getGroupAction().implies(action)) { + continue; + } + } + if (dirPerms.getOtherAction().implies(action)) { + continue; + } + throw new AccessControlException("action " + action + " not permitted on path " + + path + " for user " + user); + } + } + + private Path getDefaultDatabasePath(String dbName) throws MetaException { + if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) { + return wh.getWhRoot(); + } + return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX); + } + + protected Path getDbLocation(Database db) throws HiveException { + try { + String location = db.getLocationUri(); + if (location == null) { + return getDefaultDatabasePath(db.getName()); + } else { + return wh.getDnsPath(wh.getDatabasePath(db)); + } + } catch (MetaException ex) { + throw new HiveException(ex.getMessage()); + } + } + + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 9f76dbe..f6727b7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -282,10 +282,11 @@ public class SessionState { } try { - startSs.authenticator = HiveUtils.getAuthenticator(startSs - .getConf()); - startSs.authorizer = HiveUtils.getAuthorizeProviderManager(startSs - .getConf(), startSs.authenticator); + startSs.authenticator = HiveUtils.getAuthenticator( + startSs.getConf(),HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER); + startSs.authorizer = HiveUtils.getAuthorizeProviderManager( + startSs.getConf(), HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, + startSs.authenticator); startSs.createTableGrants = CreateTableAutomaticGrant.create(startSs .getConf()); } catch (HiveException e) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java new file mode 100644 index 0000000..195a5a4 --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java @@ -0,0 +1,204 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.ql.security; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.ql.metadata.AuthorizationException; +import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.metadata.Partition; +import org.apache.hadoop.hive.ql.metadata.Table; +import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider; +import org.apache.hadoop.hive.ql.security.authorization.Privilege; + +public class DummyHiveMetastoreAuthorizationProvider implements HiveMetastoreAuthorizationProvider { + + + protected HiveAuthenticationProvider authenticator; + + public enum AuthCallContextType { + USER, + DB, + TABLE, + PARTITION, + TABLE_AND_PARTITION + } + + class AuthCallContext { + + public AuthCallContextType type; + public List<Object> authObjects; + public Privilege[] readRequiredPriv; + public Privilege[] writeRequiredPriv; + + AuthCallContext(AuthCallContextType typeOfCall, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) { + this.type = typeOfCall; + this.authObjects = new ArrayList<Object>(); + this.readRequiredPriv = readRequiredPriv; + this.writeRequiredPriv = writeRequiredPriv; + } + AuthCallContext(AuthCallContextType typeOfCall, Object authObject, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) { + this(typeOfCall, readRequiredPriv, writeRequiredPriv); + this.authObjects.add(authObject); + } + AuthCallContext(AuthCallContextType typeOfCall, List<Object> authObjects, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) { + this(typeOfCall, readRequiredPriv, writeRequiredPriv); + this.authObjects.addAll(authObjects); + } + } + + public static final List<AuthCallContext> authCalls = new ArrayList<AuthCallContext>(); + + private Configuration conf; + public static final Log LOG = LogFactory.getLog( + DummyHiveMetastoreAuthorizationProvider.class); + + @Override + public Configuration getConf() { + return this.conf; + } + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + try { + init(conf); + } catch (HiveException e) { + throw new RuntimeException(e); + } + } + + @Override + public HiveAuthenticationProvider getAuthenticator() { + return authenticator; + } + + @Override + public void setAuthenticator(HiveAuthenticationProvider authenticator) { + this.authenticator = authenticator; + } + + @Override + public void init(Configuration conf) throws HiveException { + debugLog("DHMAP.init"); + } + + @Override + public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorize " + + "read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.USER, + readRequiredPriv, writeRequiredPriv)); + } + + @Override + public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorizedb " + + "db:" + db.getName() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.DB, + db, readRequiredPriv, writeRequiredPriv)); + } + + @Override + public void
authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorizetbl " + + "tbl:" + table.getCompleteName() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.TABLE, + table, readRequiredPriv, writeRequiredPriv)); + + } + + @Override + public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) + throws HiveException, AuthorizationException { + debugLog("DHMAP.authorizepart " + + "tbl:" + part.getTable().getCompleteName() + + " , part: " + part.getName() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + authCalls.add(new AuthCallContext(AuthCallContextType.PARTITION, + part, readRequiredPriv, writeRequiredPriv)); + + } + + @Override + public void authorize(Table table, Partition part, List columns, + Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException, + AuthorizationException { + debugLog("DHMAP.authorizecols " + + "tbl:" + table.getCompleteName() + + " , part: " + part.getName() + + " . cols: " + columns.toString() + + " , read:" + debugPrivPrint(readRequiredPriv) + + " , write:" + debugPrivPrint(writeRequiredPriv) + ); + List authObjects = new ArrayList(); + authObjects.add(table); + authObjects.add(part); + authCalls.add(new AuthCallContext(AuthCallContextType.TABLE_AND_PARTITION, + authObjects, readRequiredPriv, writeRequiredPriv)); + + } + + private void debugLog(String s) { + LOG.debug(s); + } + + private String debugPrivPrint(Privilege[] privileges) { + StringBuffer sb = new StringBuffer(); + sb.append("Privileges{"); + if (privileges != null){ + for (Privilege p : privileges){ + sb.append(p.toString()); + } + }else{ + sb.append("null"); + } + sb.append("}"); + return sb.toString(); + } + + @Override + public void setMetaStoreHandler(HMSHandler handler) { + debugLog("DHMAP.setMetaStoreHandler"); + } + + +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java b/ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java new file mode 100644 index 0000000..2dd225e --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java @@ -0,0 +1,105 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security; + +import java.util.List; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.ql.metadata.HiveException; + +/** + * + * InjectableDummyAuthenticator - An implementation of HiveMetastoreAuthenticationProvider + * that wraps another authenticator and, when asked to, injects a user-provided username + * and set of group names in its place. Injection can be toggled on and off for use in testing. + */ +public class InjectableDummyAuthenticator implements HiveMetastoreAuthenticationProvider { + + private static String userName; + private static List<String> groupNames; + private static boolean injectMode; + private static Class hmapClass = + HadoopDefaultMetastoreAuthenticator.class; + private HiveMetastoreAuthenticationProvider hmap; + + public static void injectHmapClass(Class clazz){ + hmapClass = clazz; + } + + public static void injectUserName(String user){ + userName = user; + } + + public static void injectGroupNames(List<String> groups){ + groupNames = groups; + } + + public static void injectMode(boolean mode){ + injectMode = mode; + } + + @Override + public String getUserName() { + if (injectMode){ + return userName; + } else { + return hmap.getUserName(); + } + } + + @Override + public List<String> getGroupNames() { + if (injectMode) { + return groupNames; + } else { + return hmap.getGroupNames(); + } + } + + @Override + public Configuration getConf() { + return hmap.getConf(); + } + + @Override + public void setConf(Configuration config) { + try { + hmap = (HiveMetastoreAuthenticationProvider) hmapClass.newInstance(); + } catch (InstantiationException e) { + throw new RuntimeException("Could not create an Authenticator of class " + + hmapClass.getName(), e); + } catch (IllegalAccessException e) { + throw new RuntimeException("Could not create an Authenticator of class " + + hmapClass.getName(), e); + } + + hmap.setConf(config); + } + + @Override + public void setMetaStoreHandler(HMSHandler handler) { + hmap.setMetaStoreHandler(handler); + } + + @Override + public void destroy() throws HiveException { + hmap.destroy(); + } + +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java b/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java new file mode 100644 index 0000000..55e07f5 --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java @@ -0,0 +1,307 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.hadoop.hive.ql.security; + +import java.util.ArrayList; +import java.util.List; + +import junit.framework.TestCase; + +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.security.DummyHiveMetastoreAuthorizationProvider.AuthCallContext; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * TestAuthorizationPreEventListener. Test case for + * {@link org.apache.hadoop.hive.metastore.AuthorizationPreEventListener} and + * {@link org.apache.hadoop.hive.metastore.MetaStorePreEventListener} + */ +public class TestAuthorizationPreEventListener extends TestCase { + private static final String msPort = "20001"; + private HiveConf clientHiveConf; + private HiveMetaStoreClient msc; + private Driver driver; + + private static class RunMS implements Runnable { + + @Override + public void run() { + try { + HiveMetaStore.main(new String[]{msPort}); + } catch (Throwable e) { + e.printStackTrace(System.err); + assert false; + } + } + } + + @Override + protected void setUp() throws Exception { + + super.setUp(); + System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, + AuthorizationPreEventListener.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED.varname,"true"); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname, + DummyHiveMetastoreAuthorizationProvider.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname, + HadoopDefaultMetastoreAuthenticator.class.getName()); + + + Thread t = new Thread(new RunMS()); + t.start(); + Thread.sleep(40000); + + clientHiveConf = new HiveConf(this.getClass()); + + clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort); + clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); + clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + + clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); + clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + + + SessionState.start(new CliSessionState(clientHiveConf)); + msc = new HiveMetaStoreClient(clientHiveConf, null); + driver = new Driver(clientHiveConf); + } + + @Override + protected void tearDown() throws Exception { + super.tearDown(); + } + + private void validateCreateDb(Database expectedDb, Database actualDb) { + assertEquals(expectedDb.getName(), actualDb.getName()); + assertEquals(expectedDb.getLocationUri(), actualDb.getLocationUri()); + } + + private void validateTable(Table expectedTable, Table actualTable) { + assertEquals(expectedTable.getTableName(), actualTable.getTableName()); + assertEquals(expectedTable.getDbName(), actualTable.getDbName()); + + // We won't try to be too strict in checking this because we're comparing + // table create intents with observed tables created. 
+ // If it does have a location though, we will compare, as with external tables + if ((actualTable.getSd() != null) && (actualTable.getSd().getLocation() != null)){ + assertEquals(expectedTable.getSd().getLocation(), actualTable.getSd().getLocation()); + } + } + + private void validateCreateTable(Table expectedTable, Table actualTable) { + validateTable(expectedTable, actualTable); + } + + private void validateAddPartition(Partition expectedPartition, Partition actualPartition) { + validatePartition(expectedPartition,actualPartition); + } + + private void validatePartition(Partition expectedPartition, Partition actualPartition) { + assertEquals(expectedPartition.getValues(), + actualPartition.getValues()); + assertEquals(expectedPartition.getDbName(), + actualPartition.getDbName()); + assertEquals(expectedPartition.getTableName(), + actualPartition.getTableName()); + + // assertEquals(expectedPartition.getSd().getLocation(), + // actualPartition.getSd().getLocation()); + // we don't compare locations, because the location can still be empty in + // the pre-event listener before it is created. + + assertEquals(expectedPartition.getSd().getInputFormat(), + actualPartition.getSd().getInputFormat()); + assertEquals(expectedPartition.getSd().getOutputFormat(), + actualPartition.getSd().getOutputFormat()); + assertEquals(expectedPartition.getSd().getSerdeInfo(), + actualPartition.getSd().getSerdeInfo()); + + } + + private void validateAlterPartition(Partition expectedOldPartition, + Partition expectedNewPartition, String actualOldPartitionDbName, + String actualOldPartitionTblName,List actualOldPartitionValues, + Partition actualNewPartition) { + assertEquals(expectedOldPartition.getValues(), actualOldPartitionValues); + assertEquals(expectedOldPartition.getDbName(), actualOldPartitionDbName); + assertEquals(expectedOldPartition.getTableName(), actualOldPartitionTblName); + + validatePartition(expectedNewPartition, actualNewPartition); + } + + private void validateAlterTable(Table expectedOldTable, Table expectedNewTable, + Table actualOldTable, Table actualNewTable) { + validateTable(expectedOldTable, actualOldTable); + validateTable(expectedNewTable, actualNewTable); + } + + private void validateDropPartition(Partition expectedPartition, Partition actualPartition) { + validatePartition(expectedPartition, actualPartition); + } + + private void validateDropTable(Table expectedTable, Table actualTable) { + validateTable(expectedTable, actualTable); + } + + private void validateDropDb(Database expectedDb, Database actualDb) { + assertEquals(expectedDb, actualDb); + } + + public void testListener() throws Exception { + String dbName = "tmpdb"; + String tblName = "tmptbl"; + String renamed = "tmptbl2"; + int listSize = 0; + + List authCalls = DummyHiveMetastoreAuthorizationProvider.authCalls; + assertEquals(authCalls.size(),listSize); + + driver.run("create database " + dbName); + listSize++; + Database db = msc.getDatabase(dbName); + + Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB); + validateCreateDb(db,dbFromEvent); + + driver.run("use " + dbName); + driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName)); + listSize++; + Table tbl = msc.getTable(dbName, tblName); + + Table tblFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Table) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + 
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE)) + .getTTable(); + validateCreateTable(tbl, tblFromEvent); + + driver.run("alter table tmptbl add partition (b='2011')"); + listSize++; + Partition part = msc.getPartition("tmpdb", "tmptbl", "b=2011"); + + Partition ptnFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + validateAddPartition(part,ptnFromEvent); + + driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'")); + listSize++; + + //the partition did not change, + // so the new partition should be similar to the original partition + Partition modifiedP = msc.getPartition(dbName, tblName, "b=2011"); + + Partition ptnFromEventAfterAlter = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + + validateAlterPartition(part, modifiedP, ptnFromEventAfterAlter.getDbName(), + ptnFromEventAfterAlter.getTableName(), ptnFromEventAfterAlter.getValues(), + ptnFromEventAfterAlter); + + + List part_vals = new ArrayList(); + part_vals.add("c=2012"); + Partition newPart = msc.appendPartition(dbName, tblName, part_vals); + + listSize++; + + Partition newPtnFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + validateAddPartition(newPart,newPtnFromEvent); + + + driver.run(String.format("alter table %s rename to %s", tblName, renamed)); + listSize++; + + Table renamedTable = msc.getTable(dbName, renamed); + Table renamedTableFromEvent = ( + (org.apache.hadoop.hive.ql.metadata.Table) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE)) + .getTTable(); + + validateAlterTable(tbl, renamedTable, renamedTableFromEvent, + renamedTable); + assertFalse(tbl.getTableName().equals(renamedTable.getTableName())); + + + //change the table name back + driver.run(String.format("alter table %s rename to %s", renamed, tblName)); + listSize++; + + driver.run(String.format("alter table %s drop partition (b='2011')", tblName)); + listSize++; + + Partition ptnFromDropPartition = ( + (org.apache.hadoop.hive.ql.metadata.Partition) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION)) + .getTPartition(); + + validateDropPartition(modifiedP, ptnFromDropPartition); + + driver.run("drop table " + tblName); + listSize++; + Table tableFromDropTableEvent = ( + (org.apache.hadoop.hive.ql.metadata.Table) + assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE)) + .getTTable(); + + + validateDropTable(tbl, tableFromDropTableEvent); + + driver.run("drop database " + dbName); + listSize++; + Database dbFromDropDatabaseEvent = + (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB); + + validateDropDb(db, dbFromDropDatabaseEvent); + } + + public Object assertAndExtractSingleObjectFromEvent(int listSize, + List authCalls, + DummyHiveMetastoreAuthorizationProvider.AuthCallContextType callType) { + 
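+ // Exactly one new call, of the expected type and carrying a single
+ // authorization object, should have been recorded; return that object so
+ // the caller can validate it.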
assertEquals(listSize, authCalls.size()); + assertEquals(1,authCalls.get(listSize-1).authObjects.size()); + + assertEquals(callType,authCalls.get(listSize-1).type); + return (authCalls.get(listSize-1).authObjects.get(0)); + } + +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java b/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java new file mode 100644 index 0000000..8433f8f --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java @@ -0,0 +1,175 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security; + +import java.util.ArrayList; +import java.util.List; + +import junit.framework.TestCase; + +import org.apache.hadoop.hive.cli.CliSessionState; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; +import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.shims.ShimLoader; +import org.apache.hadoop.security.UserGroupInformation; + +/** + * TestDefaultHiveMetaStoreAuthorizationProvider. Test case for + * DefaultHiveMetastoreAuthorizationProvider + * using {@link org.apache.hadoop.hive.metastore.AuthorizationPreEventListener} + * + * Note that while we do use the hive driver to test, that is mostly for test + * writing ease, and it has the same effect as using a metastore client directly + * because we disable hive client-side authorization for this test, and only + * turn on server-side auth. 
+ */ +public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase { + private static final String msPort = "20001"; + private HiveConf clientHiveConf; + private HiveMetaStoreClient msc; + private Driver driver; + private UserGroupInformation ugi; + + private static class RunMS implements Runnable { + + @Override + public void run() { + try { + HiveMetaStore.main(new String[]{msPort}); + } catch (Throwable e) { + e.printStackTrace(System.err); + assert false; + } + } + } + + @Override + protected void setUp() throws Exception { + + super.setUp(); + System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname, + AuthorizationPreEventListener.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED.varname,"true"); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname, + DefaultHiveMetastoreAuthorizationProvider.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname, + InjectableDummyAuthenticator.class.getName()); + System.setProperty(HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS.varname, ""); + + + Thread t = new Thread(new RunMS()); + t.start(); + Thread.sleep(40000); + + clientHiveConf = new HiveConf(this.getClass()); + + clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,false); + + clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort); + clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3); + clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + + clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); + clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + + ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf); + + SessionState.start(new CliSessionState(clientHiveConf)); + msc = new HiveMetaStoreClient(clientHiveConf, null); + driver = new Driver(clientHiveConf); + } + + @Override + protected void tearDown() throws Exception { + super.tearDown(); + } + + private void validateCreateDb(Database expectedDb, String dbName) { + assertEquals(expectedDb.getName(), dbName); + } + + private void validateCreateTable(Table expectedTable, String tblName, String dbName) { + assertNotNull(expectedTable); + assertEquals(expectedTable.getTableName(),tblName); + assertEquals(expectedTable.getDbName(),dbName); + } + + public void testSimplePrivileges() throws Exception { + String dbName = "smpdb"; + String tblName = "smptbl"; + + String userName = ugi.getUserName(); + + CommandProcessorResponse ret = driver.run("create database " + dbName); + assertEquals(0,ret.getResponseCode()); + Database db = msc.getDatabase(dbName); + + validateCreateDb(db,dbName); + + driver.run("use " + dbName); + ret = driver.run( + String.format("create table %s (a string) partitioned by (b string)", tblName)); + + assertEquals(1,ret.getResponseCode()); + // failure from not having permissions to create table + + driver.run("grant create on database "+dbName+" to user "+userName); + + driver.run("use " + dbName); + ret = driver.run( + String.format("create table %s (a string) partitioned by (b string)", tblName)); + + assertEquals(0,ret.getResponseCode()); // now it succeeds. 
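+ // Client-side authorization was disabled in setUp() above, so this success
+ // demonstrates the grant being enforced by the metastore-side provider.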
+ Table tbl = msc.getTable(dbName, tblName); + + validateCreateTable(tbl,tblName, dbName); + + String fakeUser = "mal"; + List fakeGroupNames = new ArrayList(); + fakeGroupNames.add("groupygroup"); + + InjectableDummyAuthenticator.injectUserName(fakeUser); + InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames); + InjectableDummyAuthenticator.injectMode(true); + + ret = driver.run( + String.format("create table %s (a string) partitioned by (b string)", tblName+"mal")); + + assertEquals(1,ret.getResponseCode()); + + + ret = driver.run("alter table "+tblName+" add partition (b='2011')"); + assertEquals(1,ret.getResponseCode()); + + InjectableDummyAuthenticator.injectMode(false); + + ret = driver.run("alter table "+tblName+" add partition (b='2011')"); + assertEquals(0,ret.getResponseCode()); + + } + +}
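
Reviewer note, not part of the patch: as orientation for how the pieces above fit together, here is a minimal sketch of how a metastore-side caller (for example the AuthorizationPreEventListener that the tests reference, which is not included in this patch) might resolve and wire up a provider using the new configuration keys. The class name MetastoreAuthWiringSketch and the wire() method are illustrative only; the HiveUtils and HiveConf.ConfVars names are the ones introduced or modified by this patch, and the cast assumes the configured class implements HiveMetastoreAuthorizationProvider.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;

public class MetastoreAuthWiringSketch {
  public static HiveMetastoreAuthorizationProvider wire(Configuration conf, HMSHandler handler)
      throws HiveException {
    // Resolve the metastore-side authenticator via the new key, mirroring the
    // SessionState change above that passes the client-side keys explicitly.
    HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(
        conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
    // getAuthorizeProviderManager returns a HiveAuthorizationProvider; the cast
    // assumes hive.security.metastore.authorization.manager names a class that
    // implements HiveMetastoreAuthorizationProvider.
    HiveMetastoreAuthorizationProvider authorizer =
        (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
            conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);
    // The handler must be set before any authorize(...) call so the provider can
    // consult metastore state directly instead of going through a Hive client.
    authorizer.setMetaStoreHandler(handler);
    return authorizer;
  }
}

The tests above enable this path by setting hive.security.metastore.authorization.enabled to true and pointing hive.security.metastore.authorization.manager at a HiveMetastoreAuthorizationProvider implementation such as DefaultHiveMetastoreAuthorizationProvider or StorageBasedAuthorizationProvider.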