diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 04166db86521b08ab0ee8a23f21b6e1d03b088cd..ae44b083beae96581dfad2ecee33f208a8a6e34c 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1191,7 +1191,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal */ @Deprecated METASTORE_EVENT_MESSAGE_FACTORY("hive.metastore.event.message.factory", - "org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder", + "org.apache.hadoop.hive.metastore.messaging.json.gzip.GzipJSONMessageEncoder", "Factory class for making encoding and decoding messages in the events generated."), /** * @deprecated Use MetastoreConf.EXECUTE_SET_UGI diff --git a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java index da08d2f8a3d389825356abed03d7e39075400e31..5b43323a62a014cc4f86d16ab39ccf12e31b577c 100644 --- a/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java +++ b/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java @@ -41,6 +41,8 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NotificationEvent; import org.apache.hadoop.hive.metastore.api.PartitionEventType; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.io.RCFileInputFormat; import org.apache.hadoop.hive.ql.io.RCFileOutputFormat; @@ -80,7 +82,6 @@ import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertArrayEquals; -import org.apache.hadoop.util.Shell; import 
javax.annotation.Nullable; @@ -120,6 +121,8 @@ public static void startMetaStoreServer() throws Exception { System.setProperty(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS.varname, DbNotificationListener.class.getName()); // turn on db notification listener on metastore + System.setProperty(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), + JSONMessageEncoder.class.getName()); msPort = MetaStoreTestUtils.startMetaStoreWithRetry(); securityManager = System.getSecurityManager(); System.setSecurityManager(new NoExitSecurityManager()); diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java index c4c8f50702d651d30786a9d060901f72bb9fb0a1..ae7e57271dcceb105cf50deb868bfd16b3a33284 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/api/TestHCatClientNotification.java @@ -38,6 +38,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.IMetaStoreClient; import org.apache.hadoop.hive.metastore.api.NotificationEvent; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hive.hcatalog.common.HCatConstants; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; @@ -66,8 +67,9 @@ @BeforeClass public static void setupClient() throws Exception { - HiveConf conf = new HiveConf(); conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, - DbNotificationListener.class.getName()); + HiveConf conf = new HiveConf(); + conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, DbNotificationListener.class.getName()); + conf.setVar(HiveConf.ConfVars.METASTORE_EVENT_MESSAGE_FACTORY, JSONMessageEncoder.class.getName()); hCatClient = HCatClient.create(conf); 
md = MessageFactory.getInstance().getDeserializer(); } diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java index 9eacfd8be8f0a3289e9c9ceb945975c60770cabd..0c3e046604ae05154d9df5535a46f08303b67ae5 100644 --- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java +++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/TestDbNotificationListener.java @@ -274,6 +274,7 @@ public static void connectToMetastore() throws Exception { conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict"); conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, DummyRawStoreFailEvent.class.getName()); MetastoreConf.setTimeVar(conf, MetastoreConf.ConfVars.EVENT_DB_LISTENER_CLEAN_INTERVAL, CLEANUP_SLEEP_TIME, TimeUnit.SECONDS); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY, JSONMessageEncoder.class.getName()); conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); SessionState.start(new CliSessionState(conf)); diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcidTablesWithJsonMessage.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcidTablesWithJsonMessage.java index c16799da2802bb28e312434cc202f0841722573a..747b7a8c3fc2bc7128508bb477d58d170866e0a5 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcidTablesWithJsonMessage.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcidTablesWithJsonMessage.java @@ -18,12 +18,16 @@ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; 
import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.rules.TestRule; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; public class TestReplAcidTablesWithJsonMessage extends TestReplicationScenariosAcidTables { @@ -32,7 +36,10 @@ @BeforeClass public static void classLevelSetup() throws Exception { - internalBeforeClassSetup(Collections.emptyMap(), TestReplAcidTablesWithJsonMessage.class); + Map overrides = new HashMap<>(); + overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), + JSONMessageEncoder.class.getCanonicalName()); + internalBeforeClassSetup(overrides, TestReplAcidTablesWithJsonMessage.class); } @Before diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcrossInstancesWithJsonMessageFormat.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcrossInstancesWithJsonMessageFormat.java index 0ec027503221027aec92e96147ac24ec0f936414..05517c7380c4ca9d7eb8d9da0b2a94fdeecb0e77 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcrossInstancesWithJsonMessageFormat.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplAcrossInstancesWithJsonMessageFormat.java @@ -18,13 +18,16 @@ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.rules.TestRule; import java.util.ArrayList; -import java.util.Collections; +import java.util.HashMap; +import java.util.Map; public class TestReplAcrossInstancesWithJsonMessageFormat extends TestReplicationScenariosAcrossInstances { @@ -34,7 +37,10 @@ @BeforeClass public static void classLevelSetup() throws Exception { - internalBeforeClassSetup(Collections.emptyMap(), TestReplicationScenarios.class); + Map 
overrides = new HashMap<>(); + overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), + JSONMessageEncoder.class.getCanonicalName()); + internalBeforeClassSetup(overrides, TestReplicationScenarios.class); } @Before diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplIncrementalLoadAcidTablesWithJsonMessage.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplIncrementalLoadAcidTablesWithJsonMessage.java index 422508d061660b1179ce4f88d04934737da135a7..cd273f0835379fcde6486957f495ad8c9fcf044e 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplIncrementalLoadAcidTablesWithJsonMessage.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplIncrementalLoadAcidTablesWithJsonMessage.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; @@ -25,6 +27,8 @@ import org.junit.Ignore; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; @Ignore public class TestReplIncrementalLoadAcidTablesWithJsonMessage @@ -35,8 +39,10 @@ @BeforeClass public static void classLevelSetup() throws Exception { - internalBeforeClassSetup(Collections.emptyMap(), - TestReplIncrementalLoadAcidTablesWithJsonMessage.class); + Map overrides = new HashMap<>(); + overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), + JSONMessageEncoder.class.getCanonicalName()); + internalBeforeClassSetup(overrides, TestReplIncrementalLoadAcidTablesWithJsonMessage.class); } @Before diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplTableMigrationWithJsonFormat.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplTableMigrationWithJsonFormat.java index 
0151ed03254a3822c972f9f85f4d3248df2cd73d..5e7bf7e885fd458bee73eb0c7acb0ff9b7e3f422 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplTableMigrationWithJsonFormat.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplTableMigrationWithJsonFormat.java @@ -17,13 +17,19 @@ */ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; import org.junit.BeforeClass; -import java.util.Collections; +import java.util.HashMap; +import java.util.Map; public class TestReplTableMigrationWithJsonFormat extends TestReplicationWithTableMigration { @BeforeClass public static void classLevelSetup() throws Exception { - internalBeforeClassSetup(Collections.emptyMap()); + Map overrides = new HashMap<>(); + overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), + JSONMessageEncoder.class.getCanonicalName()); + internalBeforeClassSetup(overrides); } } diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithJsonMessageFormat.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithJsonMessageFormat.java index f76dc1d79afbacef3be84fd553fe61ee904409d9..6d9c31fe17d00425c61a6b6892147cb0ecd82750 100644 --- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithJsonMessageFormat.java +++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplWithJsonMessageFormat.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hive.ql.parse; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder; import org.apache.hive.hcatalog.api.repl.ReplicationV1CompatRule; import org.junit.BeforeClass; import org.junit.Rule; @@ -24,6 +26,8 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; 
public class TestReplWithJsonMessageFormat extends TestReplicationScenarios { @Rule @@ -33,7 +37,10 @@ @BeforeClass public static void setUpBeforeClass() throws Exception { - internalBeforeClassSetup(Collections.emptyMap(), false); + Map overrides = new HashMap<>(); + overrides.put(MetastoreConf.ConfVars.EVENT_MESSAGE_FACTORY.getHiveName(), + JSONMessageEncoder.class.getCanonicalName()); + internalBeforeClassSetup(overrides, false); } } diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 3a9912f3988f6a70babb5bddd0a1583259a2d48a..ce352e5b403d854d80a239003324210b0145ee26 100644 --- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -544,7 +544,7 @@ public static ConfVars getMetaConf(String name) { "Alternatively, configure hive.metastore.transactional.event.listeners to ensure both are invoked in same JDO transaction."), EVENT_MESSAGE_FACTORY("metastore.event.message.factory", "hive.metastore.event.message.factory", - "org.apache.hadoop.hive.metastore.messaging.json.JSONMessageEncoder", + "org.apache.hadoop.hive.metastore.messaging.json.gzip.GzipJSONMessageEncoder", "Factory class for making encoding and decoding messages in the events generated."), EVENT_NOTIFICATION_PARAMETERS_EXCLUDE_PATTERNS("metastore.notification.parameters.exclude.patterns", "hive.metastore.notification.parameters.exclude.patterns", "", diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 
c0ba8673d9cd7d9c8b0994bf40e84966d8c9d288..41f399becdf6e9dab7d188459a97b52dbd7f1592 100644 --- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -1597,7 +1597,8 @@ private void drop_database_core(RawStore ms, String catName, ConfVars.BATCH_RETRIEVE_MAX); // First pass will drop the materialized views - List materializedViewNames = get_tables_by_type(name, ".*", TableType.MATERIALIZED_VIEW.toString()); + List materializedViewNames = getTablesByTypeCore(catName, name, ".*", + TableType.MATERIALIZED_VIEW.toString()); int startIndex = 0; // retrieve the tables from the metastore in batches to alleviate memory constraints while (startIndex < materializedViewNames.size()) { @@ -5265,7 +5266,7 @@ private void alter_table_core(String catName, String dbname, String name, Table try { ret = getMS().getTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern); ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, - parsedDbName[CAT_NAME], dbname, ret); + parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (MetaException e) { ex = e; throw e; @@ -5287,7 +5288,9 @@ private void alter_table_core(String catName, String dbname, String name, Table Exception ex = null; String[] parsedDbName = parseDbName(dbname, conf); try { - ret = getMS().getTables(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern, TableType.valueOf(tableType)); + ret = getTablesByTypeCore(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], pattern, tableType); + ret = FilterUtils.filterTableNamesIfEnabled(isServerFilterEnabled, filterHook, + parsedDbName[CAT_NAME], parsedDbName[DB_NAME], ret); } catch (MetaException e) { ex = e; throw e; @@ -5300,6 +5303,27 @@ private void alter_table_core(String catName, String dbname, String name, Table return ret; } + private List getTablesByTypeCore(final String catName, 
final String dbname, + final String pattern, final String tableType) throws MetaException { + startFunction("getTablesByTypeCore", ": catName=" + catName + + ": db=" + dbname + " pat=" + pattern + ",type=" + tableType); + + List ret = null; + Exception ex = null; + try { + ret = getMS().getTables(catName, dbname, pattern, TableType.valueOf(tableType)); + } catch (MetaException e) { + ex = e; + throw e; + } catch (Exception e) { + ex = e; + throw newMetaException(e); + } finally { + endFunction("getTablesByTypeCore", ret != null, ex); + } + return ret; + } + @Override public List get_materialized_views_for_rewriting(final String dbname) throws MetaException { @@ -5367,6 +5391,7 @@ private void alter_table_core(String catName, String dbname, String name, Table try { try { tbl = get_table_core(parsedDbName[CAT_NAME], parsedDbName[DB_NAME], base_table_name); + firePreEvent(new PreReadTableEvent(tbl, this)); } catch (NoSuchObjectException e) { throw new UnknownTableException(e.getMessage()); } diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java index 49c7d88fcb082d421b454c13690de492ca9c9f95..23faa7444a8e9f9c010290539f89b9d8b44f3aa8 100644 --- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java @@ -356,6 +356,7 @@ protected void testFilterForTables(boolean filterAtServer) throws Exception { } assertEquals(0, client.getTables(DBNAME1, "*").size()); + assertEquals(0, client.getTables(DBNAME1, "*", TableType.MANAGED_TABLE).size()); assertEquals(0, client.getAllTables(DBNAME1).size()); assertEquals(0, client.getTables(DBNAME1, TAB2).size()); } diff --git 
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHmsServerAuthorization.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHmsServerAuthorization.java new file mode 100644 index 0000000000000000000000000000000000000000..494b5a27ceee800ba610508abfa0b022745c0139 --- /dev/null +++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHmsServerAuthorization.java @@ -0,0 +1,195 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hive.metastore; + +import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; +import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.InvalidOperationException; +import org.apache.hadoop.hive.metastore.events.PreEventContext; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.experimental.categories.Category; + +import java.util.List; +import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder; +import org.apache.hadoop.hive.metastore.client.builder.TableBuilder; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars; +import org.junit.After; +import org.junit.Before; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.util.StringUtils; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import org.apache.hadoop.hive.metastore.client.builder.PartitionBuilder; + +/** + * Test the filtering behavior at HMS client and HMS server. 
The configuration at each test + * changes, and therefore HMS client and server are created for each test case + */ +@Category(MetastoreUnitTest.class) +public class TestHmsServerAuthorization { + + /** + * Implementation of MetaStorePreEventListener that throw MetaException when configuration in + * its function onEvent() + */ + public static class DummyAuthorizationListenerImpl extends MetaStorePreEventListener { + private static volatile boolean throwExceptionAtCall = false; + public DummyAuthorizationListenerImpl(Configuration config) { + super(config); + } + + @Override + public void onEvent(PreEventContext context) + throws MetaException, NoSuchObjectException, InvalidOperationException { + if (throwExceptionAtCall) { + throw new MetaException("Authorization fails"); + } + } + } + + private static HiveMetaStoreClient client; + private static Configuration conf; + + private static final int DEFAULT_LIMIT_PARTITION_REQUEST = 100; + + private static String DBNAME1 = "testdb1"; + private static String DBNAME2 = "testdb2"; + private static final String TAB1 = "tab1"; + private static final String TAB2 = "tab2"; + + + protected static HiveMetaStoreClient createClient(Configuration metaStoreConf) throws Exception { + try { + return new HiveMetaStoreClient(metaStoreConf); + } catch (Throwable e) { + System.err.println("Unable to open the metastore"); + System.err.println(StringUtils.stringifyException(e)); + throw new Exception(e); + } + } + + @BeforeClass + public static void setUpForTest() throws Exception { + + // make sure env setup works + TestHmsServerAuthorization.DummyAuthorizationListenerImpl.throwExceptionAtCall = false; + + conf = MetastoreConf.newMetastoreConf(); + MetastoreConf.setLongVar(conf, ConfVars.THRIFT_CONNECTION_RETRIES, 3); + MetastoreConf.setBoolVar(conf, ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + MetastoreConf.setClass(conf, ConfVars.PRE_EVENT_LISTENERS, DummyAuthorizationListenerImpl.class, + MetaStorePreEventListener.class); + 
MetastoreConf.setBoolVar(conf, ConfVars.METRICS_ENABLED, true); + conf.set("hive.key1", "value1"); + conf.set("hive.key2", "http://www.example.com"); + conf.set("hive.key3", ""); + conf.set("hive.key4", "0"); + conf.set("datanucleus.autoCreateTables", "false"); + conf.set("hive.in.test", "true"); + + MetastoreConf.setLongVar(conf, ConfVars.BATCH_RETRIEVE_MAX, 2); + MetastoreConf.setLongVar(conf, ConfVars.LIMIT_PARTITION_REQUEST, DEFAULT_LIMIT_PARTITION_REQUEST); + MetastoreConf.setVar(conf, ConfVars.STORAGE_SCHEMA_READER_IMPL, "no.such.class"); + MetastoreConf.setBoolVar(conf, ConfVars.METASTORE_CLIENT_FILTER_ENABLED, false); + MetastoreConf.setBoolVar(conf, ConfVars.METASTORE_SERVER_FILTER_ENABLED, false); + + MetaStoreTestUtils.setConfForStandloneMode(conf); + + client = createClient(conf); + } + + @AfterClass + public static void tearDown() throws Exception { + if (client != null) { + // make sure tear down works + TestHmsServerAuthorization.DummyAuthorizationListenerImpl.throwExceptionAtCall = false; + + client.dropDatabase(DBNAME1, true, true, true); + client.dropDatabase(DBNAME2, true, true, true); + client.close(); + } + } + + /** + * This is called in each test after the configuration is set in each test case. 
+ * @throws Exception + */ + protected void creatEnv(Configuration conf) throws Exception { + client.dropDatabase(DBNAME1, true, true, true); + client.dropDatabase(DBNAME2, true, true, true); + Database db1 = new DatabaseBuilder() + .setName(DBNAME1) + .setCatalogName(Warehouse.DEFAULT_CATALOG_NAME) + .create(client, conf); + Database db2 = new DatabaseBuilder() + .setName(DBNAME2) + .setCatalogName(Warehouse.DEFAULT_CATALOG_NAME) + .create(client, conf); + new TableBuilder() + .setDbName(DBNAME1) + .setTableName(TAB1) + .addCol("id", "int") + .addCol("name", "string") + .create(client, conf); + Table tab2 = new TableBuilder() + .setDbName(DBNAME1) + .setTableName(TAB2) + .addCol("id", "int") + .addPartCol("name", "string") + .create(client, conf); + new PartitionBuilder() + .inTable(tab2) + .addValue("value1") + .addToTable(client, conf); + new PartitionBuilder() + .inTable(tab2) + .addValue("value2") + .addToTable(client, conf); + } + + /** + * Test the pre-event listener is called in function get_fields at HMS server. + * @throws Exception + */ + @Test + public void testGetFields() throws Exception { + DBNAME1 = "db_test_get_fields_1"; + DBNAME2 = "db_test_get_fields_2"; + creatEnv(conf); + + // enable throwing exception, so we can check pre-event listener is called + TestHmsServerAuthorization.DummyAuthorizationListenerImpl.throwExceptionAtCall = true; + + try { + List tableSchema = client.getFields(DBNAME1, TAB1); + fail("getFields() should fail with throw exception mode at server side"); + } catch (MetaException ex) { + boolean isMessageAuthorization = ex.getMessage().contains("Authorization fails"); + assertEquals(true, isMessageAuthorization); + } + } +}