diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
new file mode 100644
index 0000000..b59d2e1
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/StorageBasedMetastoreTestBase.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
+import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+
+/**
+ * Base class for some storage based authorization test classes
+ */
+public class StorageBasedMetastoreTestBase {
+  protected HiveConf clientHiveConf;
+  protected HiveMetaStoreClient msc;
+  protected Driver driver;
+  protected UserGroupInformation ugi;
+  private static int objNum = 0;
+
+  protected String getAuthorizationProvider() {
+    return StorageBasedAuthorizationProvider.class.getName();
+  }
+
+  protected HiveConf createHiveConf() throws Exception {
+    return new HiveConf(this.getClass());
+  }
+
+  @Before
+  public void setUp() throws Exception {
+
+    int port = MetaStoreUtils.findFreePort();
+
+    // Turn on metastore-side authorization
+    System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
+        AuthorizationPreEventListener.class.getName());
+    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
+        getAuthorizationProvider());
+    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
+        InjectableDummyAuthenticator.class.getName());
+
+    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+    clientHiveConf = createHiveConf();
+
+    // Turn off client-side authorization
+    clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, false);
+
+    clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS,
"thrift://localhost:" + port); + clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3); + clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + + clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, ""); + clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, ""); + + ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf); + + SessionState.start(new CliSessionState(clientHiveConf)); + msc = new HiveMetaStoreClient(clientHiveConf, null); + driver = new Driver(clientHiveConf); + + setupFakeUser(); + InjectableDummyAuthenticator.injectMode(false); + } + + protected void setupFakeUser() { + String fakeUser = "mal"; + List fakeGroupNames = new ArrayList(); + fakeGroupNames.add("groupygroup"); + + InjectableDummyAuthenticator.injectUserName(fakeUser); + InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames); + } + + protected String setupUser() { + return ugi.getUserName(); + } + + protected String getTestTableName() { + return this.getClass().getSimpleName() + "tab" + ++objNum; + } + + protected String getTestDbName() { + return this.getClass().getSimpleName() + "db" + ++objNum; + } + + @After + public void tearDown() throws Exception { + InjectableDummyAuthenticator.injectMode(false); + } + + protected void setPermissions(String locn, String permissions) throws Exception { + FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf); + fs.setPermission(new Path(locn), FsPermission.valueOf(permissions)); + } + + protected void validateCreateDb(Database expectedDb, String dbName) { + Assert.assertEquals(expectedDb.getName().toLowerCase(), dbName.toLowerCase()); + } + + +} diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java index 6cf8565..dfaa080 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java @@ -18,88 +18,19 @@ package org.apache.hadoop.hive.ql.security; -import java.net.URI; -import java.util.ArrayList; -import java.util.List; - -import junit.framework.TestCase; - -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; -import org.apache.hadoop.hive.cli.CliSessionState; -import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; -import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Table; -import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; -import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener; -import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider; -import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.shims.ShimLoader; -import org.apache.hadoop.security.UserGroupInformation; +import org.junit.Assert; +import org.junit.Test; /** * Test cases focusing on drop table permission checks */ -public class TestStorageBasedMetastoreAuthorizationDrops extends TestCase{ - protected HiveConf 
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
index 6cf8565..dfaa080 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
@@ -18,88 +18,19 @@
 
 package org.apache.hadoop.hive.ql.security;
 
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
-import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * Test cases focusing on drop table permission checks
  */
-public class TestStorageBasedMetastoreAuthorizationDrops extends TestCase{
-  protected HiveConf clientHiveConf;
-  protected HiveMetaStoreClient msc;
-  protected Driver driver;
-  protected UserGroupInformation ugi;
-  private static int objNum = 0;
-
-  protected String getAuthorizationProvider(){
-    return StorageBasedAuthorizationProvider.class.getName();
-  }
-
-  protected HiveConf createHiveConf() throws Exception {
-    return new HiveConf(this.getClass());
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-
-    super.setUp();
-
-    int port = MetaStoreUtils.findFreePort();
-
-    // Turn on metastore-side authorization
-    System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
-        AuthorizationPreEventListener.class.getName());
-    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
-        getAuthorizationProvider());
-    System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
-        InjectableDummyAuthenticator.class.getName());
-
-    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
-
-    clientHiveConf = createHiveConf();
-
-    // Turn off client-side authorization
-    clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,false);
-
-    clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
-    clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-    clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-
-    clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-    clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-
-    ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
-
-    SessionState.start(new CliSessionState(clientHiveConf));
-    msc = new HiveMetaStoreClient(clientHiveConf, null);
-    driver = new Driver(clientHiveConf);
-
-    setupFakeUser();
-    InjectableDummyAuthenticator.injectMode(false);
-  }
-
+public class TestStorageBasedMetastoreAuthorizationDrops extends StorageBasedMetastoreTestBase {
+  @Test
   public void testDropDatabase() throws Exception {
     dropDatabaseByOtherUser("-rwxrwxrwx", 0);
     dropDatabaseByOtherUser("-rwxrwxrwt", 1);
@@ -111,12 +42,12 @@ public void testDropDatabase() throws Exception {
    * @param expectedRet - expected return code for drop by other user
    * @throws Exception
    */
-  private void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
+  public void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
     String dbName = getTestDbName();
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
@@ -124,10 +55,11 @@ private void dropDatabaseByOtherUser(String perm, int expectedRet) throws Except
 
     resp = driver.run("drop database " + dbName);
-    assertEquals(expectedRet, resp.getResponseCode());
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
   }
 
+  @Test
   public void testDropTable() throws Exception {
     dropTableByOtherUser("-rwxrwxrwx", 0);
     dropTableByOtherUser("-rwxrwxrwt", 1);
@@ -138,13 +70,13 @@ public void testDropTable() throws Exception {
    * @param expectedRet expected return code on drop table
    * @throws Exception
    */
-  private void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
+  public void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
     String dbName = getTestDbName();
     String tblName = getTestTableName();
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
@@ -152,18 +84,19 @@ private void dropTableByOtherUser(String perm, int expectedRet) throws Exception
 
     String dbDotTable = dbName + "." + tblName;
     resp = driver.run("create table " + dbDotTable + "(i int)");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     InjectableDummyAuthenticator.injectMode(true);
 
     resp = driver.run("drop table " + dbDotTable);
-    assertEquals(expectedRet, resp.getResponseCode());
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
   }
 
   /**
    * Drop view should not be blocked by SBA. View will not have any location to drop.
    * @throws Exception
    */
+  @Test
   public void testDropView() throws Exception {
     String dbName = getTestDbName();
     String tblName = getTestTableName();
@@ -171,7 +104,7 @@ public void testDropView() throws Exception {
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
@@ -179,20 +112,20 @@ public void testDropView() throws Exception {
 
     String dbDotTable = dbName + "." + tblName;
     resp = driver.run("create table " + dbDotTable + "(i int)");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     String dbDotView = dbName + "." + viewName;
     resp = driver.run("create view " + dbDotView + " as select * from " + dbDotTable);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     resp = driver.run("drop view " + dbDotView);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     resp = driver.run("drop table " + dbDotTable);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
   }
-
+  @Test
   public void testDropPartition() throws Exception {
     dropPartitionByOtherUser("-rwxrwxrwx", 0);
     dropPartitionByOtherUser("-rwxrwxrwt", 1);
@@ -203,70 +136,29 @@ public void testDropPartition() throws Exception {
    * @param expectedRet expected return code
    * @throws Exception
    */
-  private void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
+  public void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
     String dbName = getTestDbName();
     String tblName = getTestTableName();
     setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
 
     CommandProcessorResponse resp = driver.run("create database " + dbName);
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Database db = msc.getDatabase(dbName);
     validateCreateDb(db, dbName);
     setPermissions(db.getLocationUri(), "-rwxrwxrwx");
 
     String dbDotTable = dbName + "." + tblName;
     resp = driver.run("create table " + dbDotTable + "(i int) partitioned by (b string)");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
     Table tab = msc.getTable(dbName, tblName);
     setPermissions(tab.getSd().getLocation(), perm);
 
     resp = driver.run("alter table " + dbDotTable + " add partition (b='2011')");
-    assertEquals(0, resp.getResponseCode());
+    Assert.assertEquals(0, resp.getResponseCode());
 
     InjectableDummyAuthenticator.injectMode(true);
 
     resp = driver.run("alter table " + dbDotTable + " drop partition (b='2011')");
-    assertEquals(expectedRet, resp.getResponseCode());
-  }
-
-  private void setupFakeUser() {
-    String fakeUser = "mal";
-    List<String> fakeGroupNames = new ArrayList<String>();
-    fakeGroupNames.add("groupygroup");
-
-    InjectableDummyAuthenticator.injectUserName(fakeUser);
-    InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames);
-  }
-
-  private String setupUser() {
-    return ugi.getUserName();
-  }
-
-  private String getTestTableName() {
-    return this.getClass().getSimpleName() + "tab" + ++objNum;
+    Assert.assertEquals(expectedRet, resp.getResponseCode());
   }
 
-  private String getTestDbName() {
-    return this.getClass().getSimpleName() + "db" + ++objNum;
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    super.tearDown();
-    InjectableDummyAuthenticator.injectMode(false);
-  }
-
-  protected void setPermissions(String locn, String permissions) throws Exception {
-    FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf);
-    fs.setPermission(new Path(locn), FsPermission.valueOf(permissions));
-  }
-
-  private void validateCreateDb(Database expectedDb, String dbName) {
-    assertEquals(expectedDb.getName().toLowerCase(), dbName.toLowerCase());
-  }
-
-  private void validateCreateTable(Table expectedTable, String tblName, String dbName) {
-    assertNotNull(expectedTable);
-    assertEquals(expectedTable.getTableName().toLowerCase(),tblName.toLowerCase());
-    assertEquals(expectedTable.getDbName().toLowerCase(),dbName.toLowerCase());
-  }
 }
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java
new file mode 100644
index 0000000..6f45a59
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationReads.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security;
+
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test cases focusing on metastore read (table and database metadata) permission checks
+ */
+public class TestStorageBasedMetastoreAuthorizationReads extends StorageBasedMetastoreTestBase {
+
+  @Test
+  public void testReadTableSuccess() throws Exception {
+    readTableByOtherUser("-rwxrwxrwx", true);
+  }
+
+  @Test
+  public void testReadTableFailure() throws Exception {
+    readTableByOtherUser("-rwxrwx---", false);
+  }
+
+  /**
+   * @param perm dir permission for table dir
+   * @param isSuccess whether the commands are expected to succeed
+   * @throws Exception
+   */
+  private void readTableByOtherUser(String perm, boolean isSuccess) throws Exception {
+    String dbName = getTestDbName();
+    String tblName = getTestTableName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+
+    setPermissions(db.getLocationUri(), "-rwxrwxrwx");
+
+    String dbDotTable = dbName + "." + tblName;
+    resp = driver.run("create table " + dbDotTable + "(i int) partitioned by (date string)");
+    Assert.assertEquals(0, resp.getResponseCode());
+    Table tab = msc.getTable(dbName, tblName);
+    setPermissions(tab.getSd().getLocation(), perm);
+
+    InjectableDummyAuthenticator.injectMode(true);
+
+    testCmd(driver, "DESCRIBE " + dbDotTable, isSuccess);
+    testCmd(driver, "DESCRIBE EXTENDED " + dbDotTable, isSuccess);
+    testCmd(driver, "SHOW PARTITIONS " + dbDotTable, isSuccess);
+    testCmd(driver, "SHOW COLUMNS IN " + tblName + " IN " + dbName, isSuccess);
+    testCmd(driver, "use " + dbName, true);
+    testCmd(driver, "SHOW TABLE EXTENDED LIKE " + tblName, isSuccess);
+  }
+
+  @Test
+  public void testReadDbSuccess() throws Exception {
+    readDbByOtherUser("-rwxrwxrwx", true);
+  }
+
+  @Test
+  public void testReadDbFailure() throws Exception {
+    readDbByOtherUser("-rwxrwx---", false);
+  }
+
+  /**
+   * @param perm dir permission for database dir
+   * @param isSuccess whether the commands are expected to succeed
+   * @throws Exception
+   */
+  private void readDbByOtherUser(String perm, boolean isSuccess) throws Exception {
+    String dbName = getTestDbName();
+    setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
+
+    CommandProcessorResponse resp = driver.run("create database " + dbName);
+    Assert.assertEquals(0, resp.getResponseCode());
+    Database db = msc.getDatabase(dbName);
+    validateCreateDb(db, dbName);
+    setPermissions(db.getLocationUri(), perm);
+
+    InjectableDummyAuthenticator.injectMode(true);
+
+    testCmd(driver, "DESCRIBE DATABASE " + dbName, isSuccess);
+    testCmd(driver, "DESCRIBE DATABASE EXTENDED " + dbName, isSuccess);
+    testCmd(driver, "SHOW TABLES IN " + dbName, isSuccess);
+    driver.run("use " + dbName);
+    testCmd(driver, "SHOW TABLES ", isSuccess);
+  }
+
+  private void testCmd(Driver driver, String cmd, boolean isSuccess)
+      throws CommandNeedRetryException {
+    CommandProcessorResponse resp = driver.run(cmd);
+    Assert.assertEquals(isSuccess, resp.getResponseCode() == 0);
+  }
+
+}
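Note for reviewers: the permission strings these tests pass to setPermissions() are the symbolic ls -l form accepted by Hadoop's FsPermission.valueOf: a file-type character followed by nine permission characters, with a trailing "t" marking the sticky bit. A standalone sketch of how the three strings exercised above decode (the class name is illustrative; the sticky-bit comment reflects the expected return codes in the drop tests):

    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class PermissionStringDemo {
      public static void main(String[] args) {
        // "-rwxrwxrwx": world-writable, so operations by the fake "other" user pass.
        FsPermission open = FsPermission.valueOf("-rwxrwxrwx");
        System.out.println(open.getOtherAction().implies(FsAction.WRITE)); // true

        // "-rwxrwx---": no access for "other", so reads by the injected user are denied.
        FsPermission closed = FsPermission.valueOf("-rwxrwx---");
        System.out.println(closed.getOtherAction().implies(FsAction.READ)); // false

        // "-rwxrwxrwt": the trailing 't' sets the sticky bit; non-owners cannot delete
        // children, which is why the drop tests expect a non-zero return code here.
        FsPermission sticky = FsPermission.valueOf("-rwxrwxrwt");
        System.out.println(sticky.getStickyBit()); // true
      }
    }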
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 5b5102c..fc71061 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -48,9 +48,6 @@
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.regex.Pattern;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableListMultimap;
-import com.google.common.collect.Multimaps;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -171,6 +168,8 @@
 import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreEventContext;
 import org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent;
+import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreReadTableEvent;
 import org.apache.hadoop.hive.metastore.model.MDBPrivilege;
 import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
 import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege;
@@ -203,7 +202,10 @@
 import com.facebook.fb303.FacebookBase;
 import com.facebook.fb303.fb_status;
 
 import com.google.common.base.Splitter;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Multimaps;
 
 /**
  * TODO:pc remove application logic to a separate interface.
@@ -836,6 +838,7 @@ public Database get_database(final String name) throws NoSuchObjectException,
       Exception ex = null;
       try {
         db = getMS().getDatabase(name);
+        firePreEvent(new PreReadDatabaseEvent(db, this));
       } catch (MetaException e) {
         ex = e;
         throw e;
@@ -1630,6 +1633,7 @@ public Table get_table(final String dbname, final String name) throws MetaExcept
           throw new NoSuchObjectException(dbname + "." + name + " table not found");
         }
+        firePreEvent(new PreReadTableEvent(t, this));
       } catch (Exception e) {
         ex = e;
         if (e instanceof MetaException) {
@@ -2671,6 +2675,7 @@ public Partition get_partition(final String db_name, final String tbl_name,
       Partition ret = null;
       Exception ex = null;
       try {
+        fireReadTablePreEvent(db_name, tbl_name);
         ret = getMS().getPartition(db_name, tbl_name, part_vals);
       } catch (Exception e) {
         ex = e;
@@ -2687,6 +2692,28 @@ public Partition get_partition(final String db_name, final String tbl_name,
       return ret;
     }
 
+    /**
+     * Fire a pre-event for a read table operation, if there are any
+     * pre-event listeners registered.
+     *
+     * @param dbName database the table belongs to
+     * @param tblName table name
+     * @throws MetaException
+     * @throws NoSuchObjectException
+     */
+    private void fireReadTablePreEvent(String dbName, String tblName)
+        throws MetaException, NoSuchObjectException {
+      if (preListeners.size() > 0) {
+        // do this only if there is a pre event listener registered
+        // (avoid an unnecessary metastore api call)
+        Table t = getMS().getTable(dbName, tblName);
+        if (t == null) {
+          throw new NoSuchObjectException(dbName + "." + tblName
+              + " table not found");
+        }
+        firePreEvent(new PreReadTableEvent(t, this));
+      }
+    }
+
     @Override
     public Partition get_partition_with_auth(final String db_name,
         final String tbl_name, final List<String> part_vals,
@@ -2694,7 +2721,7 @@ public Partition get_partition_with_auth(final String db_name,
         throws MetaException, NoSuchObjectException, TException {
       startPartitionFunction("get_partition_with_auth", db_name, tbl_name, part_vals);
-
+      fireReadTablePreEvent(db_name, tbl_name);
       Partition ret = null;
       Exception ex = null;
       try {
@@ -2716,7 +2743,7 @@ public Partition get_partition_with_auth(final String db_name,
     public List<Partition> get_partitions(final String db_name, final String tbl_name,
         final short max_parts) throws NoSuchObjectException, MetaException {
       startTableFunction("get_partitions", db_name, tbl_name);
-
+      fireReadTablePreEvent(db_name, tbl_name);
       List<Partition> ret = null;
       Exception ex = null;
      try {
@@ -2797,7 +2824,7 @@ public Partition get_partition_with_auth(final String db_name,
 
     private static class StorageDescriptorKey {
 
-      private StorageDescriptor sd;
+      private final StorageDescriptor sd;
 
       StorageDescriptorKey(StorageDescriptor sd) {
         this.sd = sd;
       }
@@ -2919,9 +2946,9 @@ private static boolean is_partition_spec_grouping_enabled(Table table) {
     @Override
     public List<String> get_partition_names(final String db_name, final String tbl_name,
-        final short max_parts) throws MetaException {
+        final short max_parts) throws MetaException, NoSuchObjectException {
       startTableFunction("get_partition_names", db_name, tbl_name);
-
+      fireReadTablePreEvent(db_name, tbl_name);
       List<String> ret = null;
       Exception ex = null;
       try {
@@ -3038,14 +3065,7 @@ public void alter_partitions(final String db_name, final String tbl_name,
       Exception ex = null;
       try {
         for (Partition tmpPart : new_parts) {
-          try {
-            for (MetaStorePreEventListener listener : preListeners) {
-              listener.onEvent(
-                  new PreAlterPartitionEvent(db_name, tbl_name, null, tmpPart, this));
-            }
-          } catch (NoSuchObjectException e) {
-            throw new MetaException(e.getMessage());
-          }
+          firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, null, tmpPart, this));
         }
         oldParts = alterHandler.alterPartitions(getMS(), wh, db_name, tbl_name, new_parts);
@@ -3413,6 +3433,7 @@ public String get_config_value(String name, String defaultValue)
     private Partition get_partition_by_name_core(final RawStore ms, final String db_name,
         final String tbl_name, final String part_name)
         throws MetaException, NoSuchObjectException, TException {
+      fireReadTablePreEvent(db_name, tbl_name);
       List<String> partVals = null;
       try {
         partVals = getPartValsFromName(ms, db_name, tbl_name, part_name);
@@ -3434,7 +3455,6 @@ public Partition get_partition_by_name(final String db_name, final String tbl_na
       startFunction("get_partition_by_name", ": db=" + db_name + " tbl="
           + tbl_name + " part=" + part_name);
-
       Partition ret = null;
       Exception ex = null;
       try {
@@ -3564,6 +3584,7 @@ public boolean drop_partition_by_name_with_environment_context(final String db_n
         final List<String> groupNames) throws MetaException, TException, NoSuchObjectException {
       startPartitionFunction("get_partitions_ps_with_auth", db_name, tbl_name, part_vals);
+      fireReadTablePreEvent(db_name, tbl_name);
       List<Partition> ret = null;
       Exception ex = null;
       try {
@@ -3586,6 +3607,7 @@ public boolean drop_partition_by_name_with_environment_context(final String db_n
         final String tbl_name, final List<String> part_vals, final short max_parts)
         throws MetaException, TException, NoSuchObjectException {
       startPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals);
+      fireReadTablePreEvent(db_name, tbl_name);
       List<String> ret = null;
       Exception ex = null;
       try {
@@ -4056,7 +4078,7 @@ public boolean update_partition_column_statistics(ColumnStatistics colStats)
       } finally {
         endFunction("write_partition_column_statistics: ", ret != false, null, tableName);
       }
-    } 
+    }
 
     @Override
     public boolean delete_partition_column_statistics(String dbName, String tableName,
@@ -4111,7 +4133,7 @@ public boolean delete_table_column_statistics(String dbName, String tableName, S
         final String tblName, final String filter, final short maxParts)
         throws MetaException, NoSuchObjectException, TException {
       startTableFunction("get_partitions_by_filter", dbName, tblName);
-
+      fireReadTablePreEvent(dbName, tblName);
       List<Partition> ret = null;
       Exception ex = null;
       try {
@@ -4161,6 +4183,7 @@ public PartitionsByExprResult get_partitions_by_expr(
         PartitionsByExprRequest req) throws TException {
       String dbName = req.getDbName(), tblName = req.getTblName();
       startTableFunction("get_partitions_by_expr", dbName, tblName);
+      fireReadTablePreEvent(dbName, tblName);
       PartitionsByExprResult ret = null;
       Exception ex = null;
       try {
@@ -4197,7 +4220,7 @@ private void rethrowException(Exception e)
         throws MetaException, NoSuchObjectException, TException {
       startTableFunction("get_partitions_by_names", dbName, tblName);
-
+      fireReadTablePreEvent(dbName, tblName);
       List<Partition> ret = null;
       Exception ex = null;
       try {
@@ -5422,7 +5445,7 @@ public boolean set_aggr_stats_for(SetPartitionsStatsRequest request)
       }
     }
 
-  
+
   public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws MetaException {
     return newHMSHandler(name, hiveConf, false);
   }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
index 4499485..dbc3247 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreEventContext.java
@@ -38,6 +38,8 @@
     DROP_DATABASE,
     LOAD_PARTITION_DONE,
     AUTHORIZATION_API_CALL,
+    READ_TABLE,
+    READ_DATABASE
   }
 
   private final PreEventType eventType;
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreReadDatabaseEvent.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreReadDatabaseEvent.java
new file mode 100644
index 0000000..d415620
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreReadDatabaseEvent.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+
+/**
+ * Database read event
+ */
+public class PreReadDatabaseEvent extends PreEventContext {
+  private final Database db;
+
+  public PreReadDatabaseEvent(Database db, HMSHandler handler) {
+    super(PreEventType.READ_DATABASE, handler);
+    this.db = db;
+  }
+
+  /**
+   * @return the db
+   */
+  public Database getDatabase() {
+    return db;
+  }
+
+}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreReadTableEvent.java b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreReadTableEvent.java
new file mode 100644
index 0000000..b93da67
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/events/PreReadTableEvent.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
+import org.apache.hadoop.hive.metastore.api.Table;
+
+/**
+ * Table read event
+ */
+public class PreReadTableEvent extends PreEventContext {
+
+  private final Table table;
+
+  public PreReadTableEvent(Table table, HMSHandler handler) {
+    super(PreEventType.READ_TABLE, handler);
+    this.table = table;
+  }
+
+  /**
+   * @return the table
+   */
+  public Table getTable() {
+    return table;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
index 930285e..4ffb92e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
@@ -40,6 +40,8 @@
 import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreReadTableEvent;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
@@ -136,6 +138,12 @@ public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectE
     case ALTER_TABLE:
       authorizeAlterTable((PreAlterTableEvent)context);
       break;
+    case READ_TABLE:
+      authorizeReadTable((PreReadTableEvent)context);
+      break;
+    case READ_DATABASE:
+      authorizeReadDatabase((PreReadDatabaseEvent)context);
+      break;
     case ADD_PARTITION:
       authorizeAddPartition((PreAddPartitionEvent)context);
       break;
@@ -162,6 +170,34 @@ public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectE
 
   }
 
+  private void authorizeReadTable(PreReadTableEvent context) throws InvalidOperationException,
+      MetaException {
+    try {
+      org.apache.hadoop.hive.ql.metadata.Table wrappedTable = new TableWrapper(context.getTable());
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(wrappedTable, new Privilege[] { Privilege.SELECT }, null);
+      }
+    } catch (AuthorizationException e) {
+      throw invalidOperationException(e);
+    } catch (HiveException e) {
+      throw metaException(e);
+    }
+  }
+
+  private void authorizeReadDatabase(PreReadDatabaseEvent context)
+      throws InvalidOperationException, MetaException {
+    try {
+      for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
+        authorizer.authorize(new Database(context.getDatabase()),
+            new Privilege[] { Privilege.SELECT }, null);
+      }
+    } catch (AuthorizationException e) {
+      throw invalidOperationException(e);
+    } catch (HiveException e) {
+      throw metaException(e);
+    }
+  }
+
   private void authorizeAuthorizationAPICall() throws InvalidOperationException, MetaException {
     for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
       try {
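Note for reviewers: taken together, the metastore now fires PreReadTableEvent/PreReadDatabaseEvent before table- and database-reading calls, and AuthorizationPreEventListener maps them to a SELECT privilege check against the configured providers. Any other listener registered via hive.metastore.pre.event.listeners can observe the new event types the same way. A minimal sketch of such a consumer (the class name and the logging are illustrative only, not part of this patch):

    package org.apache.hadoop.hive.metastore;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
    import org.apache.hadoop.hive.metastore.events.PreEventContext;
    import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent;
    import org.apache.hadoop.hive.metastore.events.PreReadTableEvent;

    // Hypothetical listener, shown only to illustrate how the new READ_* event
    // types surface to pre-event listener implementations.
    public class ReadAuditPreEventListener extends MetaStorePreEventListener {

      public ReadAuditPreEventListener(Configuration config) {
        super(config);
      }

      @Override
      public void onEvent(PreEventContext context)
          throws MetaException, NoSuchObjectException, InvalidOperationException {
        switch (context.getEventType()) {
        case READ_TABLE:
          // Fired by get_table and the get_partition* read paths patched above.
          System.out.println("read table: "
              + ((PreReadTableEvent) context).getTable().getTableName());
          break;
        case READ_DATABASE:
          // Fired by get_database.
          System.out.println("read database: "
              + ((PreReadDatabaseEvent) context).getDatabase().getName());
          break;
        default:
          break;
        }
      }
    }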