diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index 49a099b..877d875 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -1618,8 +1618,9 @@
hive.security.metastore.authorization.manager
org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider
- authorization manager class name to be used in the metastore for authorization.
- The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
+ Names of authorization manager classes (comma separated) to be used in the metastore for authorization.
+ The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
+ All authorization manager classes have to successfully authorize the metastore api call for the command execution to be allowed.
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java
new file mode 100644
index 0000000..6b2f28e
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthorizationApiAuthorizer.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.Test;
+
+/**
+ * Test case for {@link MetaStoreAuthzAPIAuthorizerEmbedOnly}. The authorizer is
+ * supposed to allow api calls for metastore in embedded mode while disallowing
+ * them in remote metastore mode. Note that this is an abstract class, the
+ * subclasses that set the mode and the tests here get run as part of their
+ * testing.
+ */
+public abstract class TestAuthorizationApiAuthorizer {
+ protected static boolean isRemoteMetastoreMode;
+ private static HiveConf hiveConf;
+ private static HiveMetaStoreClient msc;
+
+ protected static void setup() throws Exception {
+ System.err.println("Running with remoteMode = " + isRemoteMetastoreMode);
+ System.setProperty("hive.metastore.pre.event.listeners",
+ AuthorizationPreEventListener.class.getName());
+ System.setProperty("hive.security.metastore.authorization.manager",
+ MetaStoreAuthzAPIAuthorizerEmbedOnly.class.getName());
+
+ hiveConf = new HiveConf();
+ if (isRemoteMetastoreMode) {
+ int port = MetaStoreUtils.findFreePort();
+ MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+ hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+ }
+ hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+ hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+ hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+ hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+ msc = new HiveMetaStoreClient(hiveConf, null);
+
+ }
+
+ interface FunctionInvoker {
+ public void invoke() throws Exception;
+ }
+
+ /**
+ * Test whether authorization passed or failed for a FunctionInvoker that invokes a metastore client
+ * api call
+ * @param mscFunctionInvoker
+ * @throws Exception
+ */
+ private void testFunction(FunctionInvoker mscFunctionInvoker) throws Exception {
+ boolean caughtEx = false;
+ try {
+ try {
+ mscFunctionInvoker.invoke();
+ } catch (RuntimeException e) {
+ // A hack to verify that the authorization check passed. An exception can be thrown because
+ // the functions are not being called with valid params.
+ // verify that exception has come from ObjectStore code, which means that the
+ // authorization checks passed.
+ String exStackString = ExceptionUtils.getStackTrace(e);
+ assertTrue("Verifying this exception came after authorization check",
+ exStackString.contains("org.apache.hadoop.hive.metastore.ObjectStore"));
+ // If it's not an exception caused by the auth check, ignore it
+ }
+ assertFalse("Authz Exception should have been thrown in remote mode", isRemoteMetastoreMode);
+ System.err.println("No auth exception thrown");
+ } catch (MetaException e) {
+ System.err.println("Caught exception");
+ caughtEx = true;
+ assertTrue(e.getMessage().contains(MetaStoreAuthzAPIAuthorizerEmbedOnly.errMsg));
+ }
+ if (!isRemoteMetastoreMode) {
+ assertFalse("No exception should be thrown in embedded mode", caughtEx);
+ }
+ }
+
+ @Test
+ public void testGrantPriv() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.grant_privileges(new PrivilegeBag(new ArrayList()));
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testRevokePriv() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.revoke_privileges(new PrivilegeBag(new ArrayList()));
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testGrantRole() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.grant_role(null, null, null, null, null, true);
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testRevokeRole() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.revoke_role(null, null, null);
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testCreateRole() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.create_role(new Role());
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testDropRole() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.drop_role(null);
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testListRoles() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.list_roles(null, null);
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testGetPrivSet() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.get_privilege_set(new HiveObjectRef(), null, new ArrayList());
+ }
+ };
+ testFunction(invoker);
+ }
+
+ @Test
+ public void testListPriv() throws Exception {
+ FunctionInvoker invoker = new FunctionInvoker() {
+ @Override
+ public void invoke() throws Exception {
+ msc.list_privileges(null, PrincipalType.USER, new HiveObjectRef());
+ }
+ };
+ testFunction(invoker);
+ }
+
+
+
+}
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java
new file mode 100644
index 0000000..b7d3cfa
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInEmbed.java
@@ -0,0 +1,16 @@
+package org.apache.hadoop.hive.metastore;
+
+import org.junit.BeforeClass;
+
+/**
+ * Test {@link TestAuthorizationApiAuthorizer} in embedded mode of metastore
+ */
+public class TestAuthzApiEmbedAuthorizerInEmbed extends TestAuthorizationApiAuthorizer {
+
+ @BeforeClass
+ public static void setup() throws Exception {
+ isRemoteMetastoreMode = false; // embedded metastore mode
+ TestAuthorizationApiAuthorizer.setup();
+ }
+
+}
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java
new file mode 100644
index 0000000..d775e72
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestAuthzApiEmbedAuthorizerInRemote.java
@@ -0,0 +1,16 @@
+package org.apache.hadoop.hive.metastore;
+
+import org.junit.BeforeClass;
+
+/**
+ * Test {@link TestAuthorizationApiAuthorizer} in remote mode of metastore
+ */
+public class TestAuthzApiEmbedAuthorizerInRemote extends TestAuthorizationApiAuthorizer {
+
+ @BeforeClass
+ public static void setup() throws Exception {
+ isRemoteMetastoreMode = true; // remote metastore mode
+ TestAuthorizationApiAuthorizer.setup();
+ }
+
+}
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
index dc6e9b0..fff1ed2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hive.ql.security;
-import java.io.IOException;
-import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -81,14 +79,6 @@ protected void setUp() throws Exception {
driver = new Driver(clientHiveConf);
}
- private static String getFreeAvailablePort() throws IOException {
- ServerSocket socket = new ServerSocket(0);
- socket.setReuseAddress(true);
- int port = socket.getLocalPort();
- socket.close();
- return "" + port;
- }
-
@Override
protected void tearDown() throws Exception {
super.tearDown();
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
new file mode 100644
index 0000000..d98f599
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.security.DummyHiveMetastoreAuthorizationProvider.AuthCallContext;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test case for verifying that multiple
+ * {@link org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener}s can
+ * be set and they get called.
+ */
+public class TestMultiAuthorizationPreEventListener {
+ private static HiveConf clientHiveConf;
+ private static HiveMetaStoreClient msc;
+ private static Driver driver;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+
+
+ int port = MetaStoreUtils.findFreePort();
+
+ System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
+ AuthorizationPreEventListener.class.getName());
+
+ // Set two dummy classes as authorization managers. Two instances should get created.
+ System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
+ DummyHiveMetastoreAuthorizationProvider.class.getName() + ","
+ + DummyHiveMetastoreAuthorizationProvider.class.getName());
+
+ System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
+ HadoopDefaultMetastoreAuthenticator.class.getName());
+
+ MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+ clientHiveConf = new HiveConf();
+
+ clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+ clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+ SessionState.start(new CliSessionState(clientHiveConf));
+ msc = new HiveMetaStoreClient(clientHiveConf, null);
+ driver = new Driver(clientHiveConf);
+ }
+
+ @Test
+ public void testMultipleAuthorizationListners() throws Exception {
+ String dbName = "hive" + this.getClass().getSimpleName().toLowerCase();
+ List authCalls = DummyHiveMetastoreAuthorizationProvider.authCalls;
+ int listSize = 0;
+ assertEquals(listSize, authCalls.size());
+
+ driver.run("create database " + dbName);
+ // verify that there are two calls because of two instances of the authorization provider
+ listSize = 2;
+ assertEquals(listSize, authCalls.size());
+
+ // verify that the actual action also went through
+ Database db = msc.getDatabase(dbName);
+ Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
+ DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
+ validateCreateDb(db,dbFromEvent);
+ }
+
+ public Object assertAndExtractSingleObjectFromEvent(int listSize,
+ List authCalls,
+ DummyHiveMetastoreAuthorizationProvider.AuthCallContextType callType) {
+ assertEquals(listSize, authCalls.size());
+ assertEquals(1,authCalls.get(listSize-1).authObjects.size());
+
+ assertEquals(callType,authCalls.get(listSize-1).type);
+ return (authCalls.get(listSize-1).authObjects.get(0));
+ }
+
+
+ private void validateCreateDb(Database expectedDb, Database actualDb) {
+ assertEquals(expectedDb.getName(), actualDb.getName());
+ assertEquals(expectedDb.getLocationUri(), actualDb.getLocationUri());
+ }
+
+
+}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
index 195a5a4..ed4b441 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
@@ -33,6 +33,10 @@
import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+/**
+ * Dummy implementation for use by unit tests. Tracks the context of calls made to
+ * its authorize functions in {@link AuthCallContext}
+ */
public class DummyHiveMetastoreAuthorizationProvider implements HiveMetastoreAuthorizationProvider {
@@ -43,7 +47,8 @@
DB,
TABLE,
PARTITION,
- TABLE_AND_PARTITION
+ TABLE_AND_PARTITION,
+ AUTHORIZATION
};
class AuthCallContext {
@@ -200,5 +205,12 @@ public void setMetaStoreHandler(HMSHandler handler) {
debugLog("DHMAP.setMetaStoreHandler");
}
+ @Override
+ public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
+ debugLog("DHMAP.authorizeauthapi");
+ authCalls.add(new AuthCallContext(AuthCallContextType.AUTHORIZATION, null, null));
+ }
+
+
}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 487d292..acef599 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -92,7 +92,6 @@
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.metastore.api.IndexAlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -148,6 +147,7 @@
import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreAuthorizationCallEvent;
import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
@@ -194,6 +194,11 @@
public class HiveMetaStore extends ThriftHiveMetastore {
public static final Log LOG = LogFactory.getLog(HiveMetaStore.class);
+ // boolean that tells if the HiveMetaStore (remote) server is being used.
+ // Can be used to determine if the calls to metastore api (HMSHandler) are being made with
+ // embedded metastore or a remote one
+ private static boolean isMetaStoreRemote = false;
+
/** A fixed date format to be used for hive partition column values. */
public static final DateFormat PARTITION_DATE_FORMAT;
static {
@@ -621,7 +626,7 @@ private void logInfo(String m) {
logAuditEvent(m);
}
- public String startFunction(String function, String extraLogInfo) {
+ private String startFunction(String function, String extraLogInfo) {
incrementCounter(function);
logInfo((getIpAddress() == null ? "" : "source:" + getIpAddress() + " ") +
function + extraLogInfo);
@@ -634,26 +639,26 @@ public String startFunction(String function, String extraLogInfo) {
return function;
}
- public String startFunction(String function) {
+ private String startFunction(String function) {
return startFunction(function, "");
}
- public String startTableFunction(String function, String db, String tbl) {
+ private String startTableFunction(String function, String db, String tbl) {
return startFunction(function, " : db=" + db + " tbl=" + tbl);
}
- public String startMultiTableFunction(String function, String db, List tbls) {
+ private String startMultiTableFunction(String function, String db, List tbls) {
String tableNames = join(tbls, ",");
return startFunction(function, " : db=" + db + " tbls=" + tableNames);
}
- public String startPartitionFunction(String function, String db, String tbl,
+ private String startPartitionFunction(String function, String db, String tbl,
List partVals) {
return startFunction(function, " : db=" + db + " tbl=" + tbl
+ "[" + join(partVals, ",") + "]");
}
- public String startPartitionFunction(String function, String db, String tbl,
+ private String startPartitionFunction(String function, String db, String tbl,
Map partName) {
return startFunction(function, " : db=" + db + " tbl=" + tbl + "partition=" + partName);
}
@@ -661,12 +666,12 @@ public String startPartitionFunction(String function, String db, String tbl,
private void endFunction(String function, boolean successful, Exception e) {
endFunction(function, successful, e, null);
}
- public void endFunction(String function, boolean successful, Exception e,
+ private void endFunction(String function, boolean successful, Exception e,
String inputTableName) {
endFunction(function, new MetaStoreEndFunctionContext(successful, e, inputTableName));
}
- public void endFunction(String function, MetaStoreEndFunctionContext context) {
+ private void endFunction(String function, MetaStoreEndFunctionContext context) {
try {
Metrics.endScope(function);
} catch (IOException e) {
@@ -1653,13 +1658,6 @@ public Table get_table(final String dbname, final String name) throws MetaExcept
return tables;
}
- public boolean set_table_parameters(String dbname, String name,
- Map params) throws NoSuchObjectException, MetaException {
- endFunction(startTableFunction("set_table_parameters", dbname, name), false, null, name);
- // TODO Auto-generated method stub
- return false;
- }
-
private Partition append_partition_common(RawStore ms, String dbName, String tableName,
List part_vals, EnvironmentContext envContext) throws InvalidObjectException,
AlreadyExistsException, MetaException {
@@ -2722,13 +2720,6 @@ public void alter_partitions(final String db_name, final String tbl_name,
return;
}
- public boolean create_index(Index index_def)
- throws IndexAlreadyExistsException, MetaException {
- endFunction(startFunction("create_index"), false, null);
- // TODO Auto-generated method stub
- throw new MetaException("Not yet implemented");
- }
-
@Override
public void alter_index(final String dbname, final String base_table_name,
final String index_name, final Index newIndex)
@@ -3823,6 +3814,7 @@ private void rethrowException(Exception e)
public PrincipalPrivilegeSet get_privilege_set(HiveObjectRef hiveObject,
String userName, List groupNames) throws MetaException,
TException {
+ firePreEvent(new PreAuthorizationCallEvent(this));
if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
String partName = getPartName(hiveObject);
return this.get_column_privilege_set(hiveObject.getDbName(), hiveObject
@@ -3860,7 +3852,7 @@ private String getPartName(HiveObjectRef hiveObject) throws MetaException {
return partName;
}
- public PrincipalPrivilegeSet get_column_privilege_set(final String dbName,
+ private PrincipalPrivilegeSet get_column_privilege_set(final String dbName,
final String tableName, final String partName, final String columnName,
final String userName, final List groupNames) throws MetaException,
TException {
@@ -3878,7 +3870,7 @@ public PrincipalPrivilegeSet get_column_privilege_set(final String dbName,
return ret;
}
- public PrincipalPrivilegeSet get_db_privilege_set(final String dbName,
+ private PrincipalPrivilegeSet get_db_privilege_set(final String dbName,
final String userName, final List groupNames) throws MetaException,
TException {
incrementCounter("get_db_privilege_set");
@@ -3894,7 +3886,7 @@ public PrincipalPrivilegeSet get_db_privilege_set(final String dbName,
return ret;
}
- public PrincipalPrivilegeSet get_partition_privilege_set(
+ private PrincipalPrivilegeSet get_partition_privilege_set(
final String dbName, final String tableName, final String partName,
final String userName, final List groupNames)
throws MetaException, TException {
@@ -3912,7 +3904,7 @@ public PrincipalPrivilegeSet get_partition_privilege_set(
return ret;
}
- public PrincipalPrivilegeSet get_table_privilege_set(final String dbName,
+ private PrincipalPrivilegeSet get_table_privilege_set(final String dbName,
final String tableName, final String userName,
final List groupNames) throws MetaException, TException {
incrementCounter("get_table_privilege_set");
@@ -3935,6 +3927,7 @@ public boolean grant_role(final String roleName,
final String grantor, final PrincipalType grantorType, final boolean grantOption)
throws MetaException, TException {
incrementCounter("add_role_member");
+ firePreEvent(new PreAuthorizationCallEvent(this));
if (PUBLIC.equals(roleName)) {
throw new MetaException("No user can be added to " + PUBLIC +". Since all users implictly"
+ " belong to " + PUBLIC + " role.");
@@ -3987,7 +3980,7 @@ private boolean isNewRoleAParent(String newRole, String curRole) throws MetaExce
public List list_roles(final String principalName,
final PrincipalType principalType) throws MetaException, TException {
incrementCounter("list_roles");
-
+ firePreEvent(new PreAuthorizationCallEvent(this));
List result = new ArrayList();
try {
List roleMaps = getMS().listRoles(principalName, principalType);
@@ -4012,7 +4005,7 @@ private boolean isNewRoleAParent(String newRole, String curRole) throws MetaExce
public boolean create_role(final Role role)
throws MetaException, TException {
incrementCounter("create_role");
-
+ firePreEvent(new PreAuthorizationCallEvent(this));
if (PUBLIC.equals(role.getRoleName())) {
throw new MetaException(PUBLIC + " role implictly exists. It can't be created.");
}
@@ -4031,6 +4024,7 @@ public boolean create_role(final Role role)
public boolean drop_role(final String roleName)
throws MetaException, TException {
incrementCounter("drop_role");
+ firePreEvent(new PreAuthorizationCallEvent(this));
if (ADMIN.equals(roleName) || PUBLIC.equals(roleName)) {
throw new MetaException(PUBLIC + "/" + ADMIN +" role can't be dropped.");
}
@@ -4048,7 +4042,7 @@ public boolean drop_role(final String roleName)
@Override
public List get_role_names() throws MetaException, TException {
incrementCounter("get_role_names");
-
+ firePreEvent(new PreAuthorizationCallEvent(this));
List ret = null;
try {
ret = getMS().listRoleNames();
@@ -4064,6 +4058,7 @@ public boolean drop_role(final String roleName)
public boolean grant_privileges(final PrivilegeBag privileges) throws MetaException,
TException {
incrementCounter("grant_privileges");
+ firePreEvent(new PreAuthorizationCallEvent(this));
Boolean ret = null;
try {
ret = getMS().grantPrivileges(privileges);
@@ -4079,7 +4074,7 @@ public boolean grant_privileges(final PrivilegeBag privileges) throws MetaExcept
public boolean revoke_role(final String roleName, final String userName,
final PrincipalType principalType) throws MetaException, TException {
incrementCounter("remove_role_member");
-
+ firePreEvent(new PreAuthorizationCallEvent(this));
if (PUBLIC.equals(roleName)) {
throw new MetaException(PUBLIC + " role can't be revoked.");
}
@@ -4100,6 +4095,7 @@ public boolean revoke_role(final String roleName, final String userName,
public boolean revoke_privileges(final PrivilegeBag privileges)
throws MetaException, TException {
incrementCounter("revoke_privileges");
+ firePreEvent(new PreAuthorizationCallEvent(this));
Boolean ret = null;
try {
ret = getMS().revokePrivileges(privileges);
@@ -4111,10 +4107,9 @@ public boolean revoke_privileges(final PrivilegeBag privileges)
return ret;
}
- public PrincipalPrivilegeSet get_user_privilege_set(final String userName,
+ private PrincipalPrivilegeSet get_user_privilege_set(final String userName,
final List groupNames) throws MetaException, TException {
incrementCounter("get_user_privilege_set");
-
PrincipalPrivilegeSet ret = null;
try {
ret = getMS().getUserPrivilegeSet(userName, groupNames);
@@ -4126,14 +4121,11 @@ public PrincipalPrivilegeSet get_user_privilege_set(final String userName,
return ret;
}
- public PrincipalType getPrincipalType(String principalType) {
- return PrincipalType.valueOf(principalType);
- }
-
@Override
public List list_privileges(String principalName,
PrincipalType principalType, HiveObjectRef hiveObject)
throws MetaException, TException {
+ firePreEvent(new PreAuthorizationCallEvent(this));
if (hiveObject.getObjectType() == null) {
return getAllPrivileges(principalName, principalType);
}
@@ -4178,7 +4170,7 @@ public PrincipalType getPrincipalType(String principalType) {
return privs;
}
- public List list_table_column_privileges(
+ private List list_table_column_privileges(
final String principalName, final PrincipalType principalType,
final String dbName, final String tableName, final String columnName)
throws MetaException, TException {
@@ -4218,7 +4210,7 @@ public PrincipalType getPrincipalType(String principalType) {
}
}
- public List list_partition_column_privileges(
+ private List list_partition_column_privileges(
final String principalName, final PrincipalType principalType,
final String dbName, final String tableName, final List partValues,
final String columnName) throws MetaException, TException {
@@ -4259,7 +4251,7 @@ public PrincipalType getPrincipalType(String principalType) {
}
}
- public List list_db_privileges(final String principalName,
+ private List list_db_privileges(final String principalName,
final PrincipalType principalType, final String dbName)
throws MetaException, TException {
incrementCounter("list_security_db_grant");
@@ -4296,7 +4288,7 @@ public PrincipalType getPrincipalType(String principalType) {
}
}
- public List list_partition_privileges(
+ private List list_partition_privileges(
final String principalName, final PrincipalType principalType,
final String dbName, final String tableName, final List partValues)
throws MetaException, TException {
@@ -4338,7 +4330,7 @@ public PrincipalType getPrincipalType(String principalType) {
}
}
- public List list_table_privileges(
+ private List list_table_privileges(
final String principalName, final PrincipalType principalType,
final String dbName, final String tableName) throws MetaException,
TException {
@@ -4376,7 +4368,7 @@ public PrincipalType getPrincipalType(String principalType) {
}
}
- public List list_global_privileges(
+ private List list_global_privileges(
final String principalName, final PrincipalType principalType)
throws MetaException, TException {
incrementCounter("list_security_user_grant");
@@ -4872,6 +4864,7 @@ public GetPrincipalsInRoleResponse get_principals_in_role(GetPrincipalsInRoleReq
throws MetaException, TException {
incrementCounter("get_principals_in_role");
+ firePreEvent(new PreAuthorizationCallEvent(this));
Exception ex = null;
List roleMaps = null;
try {
@@ -4892,6 +4885,7 @@ public GetRoleGrantsForPrincipalResponse get_role_grants_for_principal(
GetRoleGrantsForPrincipalRequest request) throws MetaException, TException {
incrementCounter("get_role_grants_for_principal");
+ firePreEvent(new PreAuthorizationCallEvent(this));
Exception ex = null;
List roleMaps = null;
try {
@@ -4965,6 +4959,13 @@ public static String getDelegationToken(String owner, String renewer)
}
/**
+ * @return true if remote metastore has been created
+ */
+ public static boolean isMetaStoreRemote() {
+ return isMetaStoreRemote;
+ }
+
+ /**
* Renew a delegation token to extend its lifetime.
*
* @param tokenStrForm
@@ -5125,7 +5126,7 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
HiveConf conf, Lock startLock, Condition startCondition,
MetaStoreThread.BooleanPointer startedServing) throws Throwable {
try {
-
+ isMetaStoreRemote = true;
// Server will create new threads up to max as necessary. After an idle
// period, it will destory threads to keep the number of threads in the
// pool to min.
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java
new file mode 100644
index 0000000..446a6ee
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreAuthorizationCallEvent.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+
+public class PreAuthorizationCallEvent extends PreEventContext {
+
+ public PreAuthorizationCallEvent (HMSHandler handler) {
+ super(PreEventType.AUTHORIZATION_API_CALL, handler);
+ }
+
+}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
index 5021a73..4499485 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
@@ -36,7 +36,8 @@
ALTER_PARTITION,
CREATE_DATABASE,
DROP_DATABASE,
- LOAD_PARTITION_DONE
+ LOAD_PARTITION_DONE,
+ AUTHORIZATION_API_CALL,
}
private final PreEventType eventType;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index eafbeff..9051ba6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -18,8 +18,11 @@
package org.apache.hadoop.hive.ql.metadata;
+import java.util.ArrayList;
import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -29,7 +32,7 @@
import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
import org.apache.hadoop.io.Text;
@@ -110,6 +113,10 @@ public static String escapeString(String str) {
static final byte[] tabUnescapeBytes = "\t".getBytes();
static final byte[] ctrlABytes = "\u0001".getBytes();
+
+ public static final Log LOG = LogFactory.getLog(HiveUtils.class);
+
+
public static Text escapeText(Text text) {
int length = text.getLength();
byte[] textBytes = text.getBytes();
@@ -276,14 +283,14 @@ public static String lightEscapeString(String str) {
public static String unparseIdentifier(String identifier) {
return unparseIdentifier(identifier, null);
}
-
+
public static String unparseIdentifier(String identifier, Configuration conf) {
// In the future, if we support arbitrary characters in
// identifiers, then we'll need to escape any backticks
// in identifier by doubling them up.
-
+
// the time has come
- String qIdSupport = conf == null ? null :
+ String qIdSupport = conf == null ? null :
HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
if ( qIdSupport != null && !"none".equals(qIdSupport) ) {
identifier = identifier.replaceAll("`", "``");
@@ -301,8 +308,7 @@ public static HiveStorageHandler getStorageHandler(
Class extends HiveStorageHandler> handlerClass =
(Class extends HiveStorageHandler>)
Class.forName(className, true, JavaUtils.getClassLoader());
- HiveStorageHandler storageHandler = (HiveStorageHandler)
- ReflectionUtils.newInstance(handlerClass, conf);
+ HiveStorageHandler storageHandler = ReflectionUtils.newInstance(handlerClass, conf);
return storageHandler;
} catch (ClassNotFoundException e) {
throw new HiveException("Error in loading storage handler."
@@ -324,8 +330,7 @@ public static HiveIndexHandler getIndexHandler(HiveConf conf,
Class extends HiveIndexHandler> handlerClass =
(Class extends HiveIndexHandler>)
Class.forName(indexHandlerClass, true, JavaUtils.getClassLoader());
- HiveIndexHandler indexHandler = (HiveIndexHandler)
- ReflectionUtils.newInstance(handlerClass, conf);
+ HiveIndexHandler indexHandler = ReflectionUtils.newInstance(handlerClass, conf);
return indexHandler;
} catch (ClassNotFoundException e) {
throw new HiveException("Error in loading index handler."
@@ -334,16 +339,27 @@ public static HiveIndexHandler getIndexHandler(HiveConf conf,
}
@SuppressWarnings("unchecked")
- public static HiveAuthorizationProvider getAuthorizeProviderManager(
+ public static List<HiveMetastoreAuthorizationProvider> getMetaStoreAuthorizeProviderManagers(
Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
HiveAuthenticationProvider authenticator) throws HiveException {
- return getAuthorizeProviderManager(conf, authorizationProviderConfKey, authenticator, false);
+
+ String clsStrs = HiveConf.getVar(conf, authorizationProviderConfKey);
+ if (clsStrs == null) {
+ return null;
+ }
+ List<HiveMetastoreAuthorizationProvider> authProviders = new ArrayList<HiveMetastoreAuthorizationProvider>();
+ for (String clsStr : clsStrs.trim().split("\\s*,\\s*")) {
+ LOG.info("Adding metastore authorization provider: " + clsStr);
+ authProviders.add((HiveMetastoreAuthorizationProvider) getAuthorizeProviderManager(conf,
+ clsStr, authenticator, false));
+ }
+ return authProviders;
}
/**
* Create a new instance of HiveAuthorizationProvider
* @param conf
- * @param authorizationProviderConfKey
+ * @param authzClassName - authorization provider class name
* @param authenticator
* @param nullIfOtherClass - return null if configuration
* does not point to a HiveAuthorizationProvider subclass
@@ -352,18 +368,16 @@ public static HiveAuthorizationProvider getAuthorizeProviderManager(
*/
@SuppressWarnings("unchecked")
public static HiveAuthorizationProvider getAuthorizeProviderManager(
- Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
+ Configuration conf, String authzClassName,
HiveAuthenticationProvider authenticator, boolean nullIfOtherClass) throws HiveException {
- String clsStr = HiveConf.getVar(conf, authorizationProviderConfKey);
-
HiveAuthorizationProvider ret = null;
try {
Class extends HiveAuthorizationProvider> cls = null;
- if (clsStr == null || clsStr.trim().equals("")) {
+ if (authzClassName == null || authzClassName.trim().equals("")) {
cls = DefaultHiveAuthorizationProvider.class;
} else {
- Class> configClass = Class.forName(clsStr, true, JavaUtils.getClassLoader());
+ Class> configClass = Class.forName(authzClassName, true, JavaUtils.getClassLoader());
if(nullIfOtherClass && !HiveAuthorizationProvider.class.isAssignableFrom(configClass) ){
return null;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
index 81442a2..930285e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.security.authorization;
+import java.util.List;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -77,12 +79,12 @@ protected HiveMetastoreAuthenticationProvider initialValue() {
}
};
- private final ThreadLocal tAuthorizer
- = new ThreadLocal() {
+ private final ThreadLocal<List<HiveMetastoreAuthorizationProvider>> tAuthorizers
+ = new ThreadLocal<List<HiveMetastoreAuthorizationProvider>>() {
@Override
- protected HiveMetastoreAuthorizationProvider initialValue() {
+ protected List<HiveMetastoreAuthorizationProvider> initialValue() {
try {
- return (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
+ return HiveUtils.getMetaStoreAuthorizeProviderManagers(
tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, tAuthenticator.get());
} catch (HiveException he) {
throw new IllegalStateException("Authorization provider instantiation failure",he);
@@ -113,12 +115,16 @@ public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectE
tConfig.set(context.getHandler().getConf());
// Warning note : HMSHandler.getHiveConf() is not thread-unique, .getConf() is.
tAuthenticator.get().setConf(tConfig.get());
- tAuthorizer.get().setConf(tConfig.get());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.setConf(tConfig.get());
+ }
tConfigSetOnAuths.set(true); // set so we don't repeat this initialization
}
tAuthenticator.get().setMetaStoreHandler(context.getHandler());
- tAuthorizer.get().setMetaStoreHandler(context.getHandler());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.setMetaStoreHandler(context.getHandler());
+ }
switch (context.getEventType()) {
case CREATE_TABLE:
@@ -148,18 +154,34 @@ public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectE
case LOAD_PARTITION_DONE:
// noop for now
break;
+ case AUTHORIZATION_API_CALL:
+ authorizeAuthorizationAPICall(); break;
default:
break;
}
}
+ private void authorizeAuthorizationAPICall() throws InvalidOperationException, MetaException {
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ try {
+ authorizer.authorizeAuthorizationApiInvocation();
+ } catch (AuthorizationException e) {
+ throw invalidOperationException(e);
+ } catch (HiveException e) {
+ throw metaException(e);
+ }
+ }
+ }
+
private void authorizeCreateDatabase(PreCreateDatabaseEvent context)
throws InvalidOperationException, MetaException {
try {
- tAuthorizer.get().authorize(new Database(context.getDatabase()),
- HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
- HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(new Database(context.getDatabase()),
+ HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
+ HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (HiveException e) {
@@ -170,9 +192,11 @@ private void authorizeCreateDatabase(PreCreateDatabaseEvent context)
private void authorizeDropDatabase(PreDropDatabaseEvent context)
throws InvalidOperationException, MetaException {
try {
- tAuthorizer.get().authorize(new Database(context.getDatabase()),
- HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
- HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(new Database(context.getDatabase()),
+ HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
+ HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (HiveException e) {
@@ -183,9 +207,12 @@ private void authorizeDropDatabase(PreDropDatabaseEvent context)
private void authorizeCreateTable(PreCreateTableEvent context)
throws InvalidOperationException, MetaException {
try {
- tAuthorizer.get().authorize(new TableWrapper(context.getTable()),
- HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
- HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
+ org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getTable());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(wrappedTable,
+ HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
+ HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (HiveException e) {
@@ -196,9 +223,12 @@ private void authorizeCreateTable(PreCreateTableEvent context)
private void authorizeDropTable(PreDropTableEvent context)
throws InvalidOperationException, MetaException {
try {
- tAuthorizer.get().authorize(new TableWrapper(context.getTable()),
- HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
- HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
+ org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getTable());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(wrappedTable,
+ HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
+ HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (HiveException e) {
@@ -208,10 +238,14 @@ private void authorizeDropTable(PreDropTableEvent context)
private void authorizeAlterTable(PreAlterTableEvent context)
throws InvalidOperationException, MetaException {
+
try {
- tAuthorizer.get().authorize(new TableWrapper(context.getOldTable()),
- null,
- new Privilege[]{Privilege.ALTER_METADATA});
+ org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getOldTable());
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(wrappedTable,
+ null,
+ new Privilege[]{Privilege.ALTER_METADATA});
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (HiveException e) {
@@ -223,9 +257,13 @@ private void authorizeAddPartition(PreAddPartitionEvent context)
throws InvalidOperationException, MetaException {
try {
for (org.apache.hadoop.hive.metastore.api.Partition mapiPart : context.getPartitions()) {
- tAuthorizer.get().authorize(new PartitionWrapper(mapiPart, context),
- HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
- HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
+ org.apache.hadoop.hive.ql.metadata.Partition wrappedPartition = new PartitionWrapper(
+ mapiPart, context);
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(wrappedPartition,
+ HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
+ HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
+ }
}
} catch (AuthorizationException e) {
throw invalidOperationException(e);
@@ -240,9 +278,13 @@ private void authorizeDropPartition(PreDropPartitionEvent context)
throws InvalidOperationException, MetaException {
try {
org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getPartition();
- tAuthorizer.get().authorize(new PartitionWrapper(mapiPart, context),
- HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
- HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+ org.apache.hadoop.hive.ql.metadata.Partition wrappedPartition = new PartitionWrapper(
+ mapiPart, context);
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(wrappedPartition,
+ HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
+ HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (NoSuchObjectException e) {
@@ -256,9 +298,13 @@ private void authorizeAlterPartition(PreAlterPartitionEvent context)
throws InvalidOperationException, MetaException {
try {
org.apache.hadoop.hive.metastore.api.Partition mapiPart = context.getNewPartition();
- tAuthorizer.get().authorize(new PartitionWrapper(mapiPart, context),
- null,
- new Privilege[]{Privilege.ALTER_METADATA});
+ org.apache.hadoop.hive.ql.metadata.Partition wrappedPartition = new PartitionWrapper(
+ mapiPart, context);
+ for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+ authorizer.authorize(wrappedPartition,
+ null,
+ new Privilege[]{Privilege.ALTER_METADATA});
+ }
} catch (AuthorizationException e) {
throw invalidOperationException(e);
} catch (NoSuchObjectException e) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
index 6685645..4120298 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/DefaultHiveMetastoreAuthorizationProvider.java
@@ -20,6 +20,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
public class DefaultHiveMetastoreAuthorizationProvider extends BitSetCheckedAuthorizationProvider
@@ -35,5 +36,10 @@ public void setMetaStoreHandler(HMSHandler handler) {
hive_db.setHandler(handler);
}
+ @Override
+ public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
+ // default no-op implementation
+ }
+
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
index 6a4fffb..23161e3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
@@ -19,6 +19,8 @@
package org.apache.hadoop.hive.ql.security.authorization;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
/**
* HiveMetastoreAuthorizationProvider : An extension of HiveAuthorizaytionProvider
@@ -37,4 +39,10 @@
*/
void setMetaStoreHandler(HMSHandler handler);
+ /**
+ * Authorize metastore authorization api call.
+ */
+ void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException;
+
+
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
new file mode 100644
index 0000000..7ad0193
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * If this authorizer is used, it allows authorization api to be invoked only in embedded
+ * metastore mode.
+ */
+public class MetaStoreAuthzAPIAuthorizerEmbedOnly extends HiveAuthorizationProviderBase
+ implements HiveMetastoreAuthorizationProvider {
+
+ public static final String errMsg = "Metastore Authorization api invocation for "
+ + "remote metastore is disabled in this configuration.";
+
+ @Override
+ public void init(Configuration conf) throws HiveException {
+ }
+
+ @Override
+ public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+ throws HiveException, AuthorizationException {
+ // not authorized by this implementation, ie operation is allowed by it
+ }
+
+ @Override
+ public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+ throws HiveException, AuthorizationException {
+ // not authorized by this implementation, ie operation is allowed by it
+ }
+
+ @Override
+ public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+ throws HiveException, AuthorizationException {
+ // not authorized by this implementation, ie operation is allowed by it
+ }
+
+ @Override
+ public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+ throws HiveException, AuthorizationException {
+ // not authorized by this implementation, ie operation is allowed by it
+ }
+
+ @Override
+ public void authorize(Table table, Partition part, List<String> columns,
+ Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv) throws HiveException,
+ AuthorizationException {
+ // not authorized by this implementation, ie operation is allowed by it
+ }
+
+ @Override
+ public void setMetaStoreHandler(HMSHandler handler) {
+ // no-op - HMSHander not needed by this impl
+ }
+
+ @Override
+ public void authorizeAuthorizationApiInvocation() throws AuthorizationException {
+ if (HiveMetaStore.isMetaStoreRemote()) {
+ throw new AuthorizationException(errMsg);
+ }
+ }
+
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index a84dd58..0dfd997 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -402,4 +402,9 @@ private static AccessControlException accessControlException(
return ace;
}
+ @Override
+ public void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException {
+ // no-op - SBA does not attempt to authorize auth api call. Allow it
+ }
+
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 7feba1d..b48535c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -380,8 +380,9 @@ private void setupAuth() {
HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
authenticator.setSessionState(this);
+ String clsStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
authorizer = HiveUtils.getAuthorizeProviderManager(conf,
- HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, authenticator, true);
+ clsStr, authenticator, true);
if (authorizer == null) {
// if it was null, the new authorization plugin must be specified in