Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMultiPartitionAuthorizationProviderBase.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMultiPartitionAuthorizationProviderBase.java	(revision )
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMultiPartitionAuthorizationProviderBase.java	(revision )
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Abstract class that allows authorization of operations on partition-sets.
+ */
+public abstract class HiveMultiPartitionAuthorizationProviderBase extends HiveAuthorizationProviderBase {
+
+  /**
+   * Authorization method for partition sets.
+   * @param table The table in question
+   * @param partitions An Iterable representing the partition-set
+   * @param requiredReadPrivileges Read-privileges required
+   * @param requiredWritePrivileges Write-privileges required
+   * @throws HiveException
+   * @throws AuthorizationException
+   */
+  public abstract void authorize(Table table, Iterable<Partition> partitions,
+                                 Privilege[] requiredReadPrivileges, Privilege[] requiredWritePrivileges)
+      throws HiveException, AuthorizationException;
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java	(date 1424398302000)
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java	(revision )
@@ -21,6 +21,8 @@
 import java.util.Iterator;
 import java.util.List;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -323,18 +325,56 @@
     }
   }
 
+  private void authorizeDropMultiPartition(HiveMultiPartitionAuthorizationProviderBase authorizer,
+                                           final PreDropPartitionEvent context)
+      throws AuthorizationException, HiveException {
+    Iterator<Partition> partitionIterator = context.getPartitionIterator();
+
+    final TableWrapper table = new TableWrapper(context.getTable());
+    final Iterator<org.apache.hadoop.hive.ql.metadata.Partition> qlPartitionIterator =
+        Iterators.transform(partitionIterator, new Function<Partition, org.apache.hadoop.hive.ql.metadata.Partition>() {
+          @Override
+          public org.apache.hadoop.hive.ql.metadata.Partition apply(Partition partition) {
+            try {
+              return new PartitionWrapper(table, partition);
+            } catch (Exception exception) {
+              LOG.error("Could not construct partition-object for: " + partition, exception);
+              throw new RuntimeException(exception);
+            }
+          }
+        });
+
+    authorizer.authorize(new TableWrapper(context.getTable()),
+        new Iterable<org.apache.hadoop.hive.ql.metadata.Partition>() {
+          @Override
+          public Iterator<org.apache.hadoop.hive.ql.metadata.Partition> iterator() {
+            return qlPartitionIterator;
+          }
+        },
+        HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
+        HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+  }
+
   private void authorizeDropPartition(PreDropPartitionEvent context)
       throws InvalidOperationException, MetaException {
     try {
-      Iterator<Partition> partitionIterator = context.getPartitionIterator();
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        if (authorizer instanceof HiveMultiPartitionAuthorizationProviderBase) {
+          // Authorize all dropped-partitions in one shot.
+          authorizeDropMultiPartition((HiveMultiPartitionAuthorizationProviderBase)authorizer, context);
+        }
+        else {
+          // Authorize individually.
-      TableWrapper table = new TableWrapper(context.getTable());
+          TableWrapper table = new TableWrapper(context.getTable());
+          Iterator<Partition> partitionIterator = context.getPartitionIterator();
-      while (partitionIterator.hasNext()) {
+          while (partitionIterator.hasNext()) {
-        org.apache.hadoop.hive.metastore.api.Partition mapiPart = partitionIterator.next();
-        org.apache.hadoop.hive.ql.metadata.Partition wrappedPartition = new PartitionWrapper(table, mapiPart);
-        for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
-          authorizer.authorize(wrappedPartition,
+            authorizer.authorize(
+                new PartitionWrapper(table, partitionIterator.next()),
-              HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
+                HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
-              HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+                HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges()
+            );
+          }
+        }
       }
     } catch (AuthorizationException e) {
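
For illustration only, here is a minimal sketch (not part of the patch) of how an authorizer might plug into the new partition-set hook added above. The class name SampleMultiPartitionAuthorizer and its single-pass policy are hypothetical; a real provider would also have to implement init() and the pre-existing single-object authorize() overloads inherited via HiveAuthorizationProviderBase, which are omitted here by keeping the sketch abstract.

package org.apache.hadoop.hive.ql.security.authorization;

import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical example; kept abstract so the remaining single-object authorize()
// methods (user, database, table, partition, columns) need not be stubbed here.
public abstract class SampleMultiPartitionAuthorizer extends HiveMultiPartitionAuthorizationProviderBase {

  @Override
  public void authorize(Table table, Iterable<Partition> partitions,
                        Privilege[] requiredReadPrivileges, Privilege[] requiredWritePrivileges)
      throws HiveException, AuthorizationException {
    // Check the table once, instead of re-checking it for every dropped partition.
    authorize(table, requiredReadPrivileges, requiredWritePrivileges);

    // Walk the partition-set a single time; the Iterable handed in by
    // AuthorizationPreEventListener wraps a lazily-transformed iterator.
    for (Partition partition : partitions) {
      if (partition == null) {
        throw new AuthorizationException(
            "Cannot authorize null partition of table " + table.getTableName());
      }
      // A real policy would apply its per-partition checks here (placeholder).
    }
  }
}

The point of the sketch is the shape of the contract: the listener hands the whole partition-set to one authorize() call, so an implementation can amortize per-table work and consume the iterator once, rather than being invoked once per dropped partition as in the else-branch of authorizeDropPartition.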