diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index cf3f50ba64a28e63b58badcc2bce7738bf434245..803da4d847ed7c1c91843bfab748dcaf0d435f52 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3483,8 +3483,9 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "hive.spark.client.secret.bits," + "hive.spark.client.rpc.server.address," + "hive.spark.client.rpc.server.port," + - "bonecp.,"+ - "hikari.", + "bonecp.," + + "hikari.," + + "dbcp.", "Comma separated list of configuration options which are immutable at runtime"), HIVE_CONF_HIDDEN_LIST("hive.conf.hidden.list", METASTOREPWD.varname + "," + HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname diff --git metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java index 0db1bc059c0f6a36e721d441dbd466736d270eca..a01e77bd65ebfb49a8f4d64d020a837975a97b5e 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java @@ -549,8 +549,9 @@ private static synchronized PersistenceManagerFactory getPMF() { if (pmf == null) { HiveConf conf = new HiveConf(ObjectStore.class); - DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf); - if (dsp == null) { + DataSourceProvider dsp = null; + if (!DataSourceProviderFactory.hasProviderSpecificConfigurations(conf) || + (dsp = DataSourceProviderFactory.getDataSourceProvider(conf)) == null) { pmf = JDOHelper.getPersistenceManagerFactory(prop); } else { try { diff --git metastore/src/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java metastore/src/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java index 
34765b0b2f34698a3ba29751a65a108e4c997502..d76151e50c59eee37d323a79bd36122aa401027f 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java @@ -84,12 +84,6 @@ public boolean supports(Configuration configuration) { String poolingType = configuration.get( MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE.varname).toLowerCase(); - if (BONECP.equals(poolingType)) { - int boneCpPropsNr = DataSourceProvider.getPrefixedProperties(configuration, BONECP).size(); - LOG.debug("Found " + boneCpPropsNr + " nr. of bonecp specific configurations"); - return boneCpPropsNr > 0; - } - LOG.debug("Configuration requested " + poolingType + " pooling, BoneCpDSProvider exiting"); - return false; + return BONECP.equals(poolingType); } } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java index 1eb792ce4503dfd82ce5660a39a5f33c1db86913..5e05d0ddaf652d3fe46c32f0a495c1e7891ef6de 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.metastore.datasource; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import org.apache.hadoop.conf.Configuration; /** @@ -27,7 +28,8 @@ public abstract class DataSourceProviderFactory { private static final ImmutableList FACTORIES = - ImmutableList.builder().add(new HikariCPDataSourceProvider(), new BoneCPDataSourceProvider()).build(); + ImmutableList.builder().add(new HikariCPDataSourceProvider(), new BoneCPDataSourceProvider(), + new DbCPDataSourceProvider()).build(); /** * @param hdpConfig hadoop configuration @@ -44,4 +46,20 @@ public 
static DataSourceProvider getDataSourceProvider(Configuration hdpConfig) return null; } + /** + * @param hdpConfig hadoop configuration + * @return true if the configuration contains settings specifically aimed for one + * of the supported connection pool implementations. + */ + public static boolean hasProviderSpecificConfigurations(Configuration hdpConfig) { + + return Iterables.any(hdpConfig, entry -> + { + String key = entry.getKey(); + return key != null && (key.startsWith(BoneCPDataSourceProvider.BONECP) || + key.startsWith(HikariCPDataSourceProvider.HIKARI) || + key.startsWith(DbCPDataSourceProvider.DBCP)); + }); + } + } diff --git metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DbCPDataSourceProvider.java metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DbCPDataSourceProvider.java new file mode 100644 index 0000000000000000000000000000000000000000..fe09d7e987fc3d45fba5a58c82b128d3d94c97db --- /dev/null +++ metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DbCPDataSourceProvider.java @@ -0,0 +1,101 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.metastore.datasource; + +import org.apache.commons.dbcp.ConnectionFactory; +import org.apache.commons.dbcp.DriverManagerConnectionFactory; +import org.apache.commons.dbcp.PoolableConnectionFactory; +import org.apache.commons.dbcp.PoolingDataSource; +import org.apache.commons.pool.impl.GenericObjectPool; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.sql.DataSource; +import java.sql.SQLException; + +/** + * DataSourceProvider for the dbcp connection pool. + */ +public class DbCPDataSourceProvider implements DataSourceProvider { + + private static final Logger LOG = LoggerFactory.getLogger(DbCPDataSourceProvider.class); + + public static final String DBCP = "dbcp"; + private static final String CONNECTION_TIMEOUT_PROPERTY= "dbcp.maxWait"; + private static final String CONNECTION_MAX_IDLE_PROPERTY= "dbcp.maxIdle"; + private static final String CONNECTION_MIN_IDLE_PROPERTY= "dbcp.minIdle"; + private static final String CONNECTION_TEST_BORROW_PROPERTY= "dbcp.testOnBorrow"; + private static final String CONNECTION_MIN_EVICT_MILLIS_PROPERTY= "dbcp.MinEvictableIdleTimeMillis"; + private static final String CONNECTION_TEST_IDLEPROPERTY= "dbcp.testWhileIdle"; + + @Override + public DataSource create(Configuration hdpConfig) throws SQLException { + + LOG.debug("Creating dbcp connection pool for the MetaStore"); + + String driverUrl = DataSourceProvider.getMetastoreJdbcDriverUrl(hdpConfig); + String user = DataSourceProvider.getMetastoreJdbcUser(hdpConfig); + String passwd = DataSourceProvider.getMetastoreJdbcPasswd(hdpConfig); + int maxPoolSize = hdpConfig.getInt( + MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS.varname, + ((Long)MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS.defaultVal).intValue()); + + long connectionTimeout = hdpConfig.getLong(CONNECTION_TIMEOUT_PROPERTY, 30000L); + 
int connectionMaxIlde = hdpConfig.getInt(CONNECTION_MAX_IDLE_PROPERTY, GenericObjectPool.DEFAULT_MAX_IDLE); + int connectionMinIlde = hdpConfig.getInt(CONNECTION_MIN_IDLE_PROPERTY, GenericObjectPool.DEFAULT_MIN_IDLE); + boolean testOnBorrow = hdpConfig.getBoolean(CONNECTION_TEST_BORROW_PROPERTY, + GenericObjectPool.DEFAULT_TEST_ON_BORROW); + long evictionTimeMillis = hdpConfig.getLong(CONNECTION_MIN_EVICT_MILLIS_PROPERTY, + GenericObjectPool.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS); + boolean testWhileIdle = hdpConfig.getBoolean(CONNECTION_TEST_IDLEPROPERTY, + GenericObjectPool.DEFAULT_TEST_WHILE_IDLE); + GenericObjectPool objectPool = new GenericObjectPool(); + final GenericObjectPool.Config config = new GenericObjectPool.Config(); + //https://commons.apache.org/proper/commons-pool/api-1.6/org/apache/commons/pool/impl/GenericObjectPool.html#setMaxActive(int) + objectPool.setMaxActive(maxPoolSize); + objectPool.setConfig(config); + objectPool.setMaxWait(connectionTimeout); + objectPool.setMaxIdle(connectionMaxIlde); + objectPool.setMinIdle(connectionMinIlde); + objectPool.setTestOnBorrow(testOnBorrow); + objectPool.setTestWhileIdle(testWhileIdle); + objectPool.setMinEvictableIdleTimeMillis(evictionTimeMillis); + ConnectionFactory connFactory = new DriverManagerConnectionFactory(driverUrl, user, passwd); + // This doesn't get used, but it's still necessary, see + // http://svn.apache.org/viewvc/commons/proper/dbcp/branches/DBCP_1_4_x_BRANCH/doc/ManualPoolingDataSourceExample.java?view=markup + PoolableConnectionFactory poolConnFactory = + new PoolableConnectionFactory(connFactory, objectPool, null, null, false, true); + return new PoolingDataSource(objectPool); + } + + @Override + public boolean mayReturnClosedConnection() { + // Only BoneCP should return true + return false; + } + + @Override + public boolean supports(Configuration configuration) { + String poolingType = + configuration.get( + MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE.varname).toLowerCase(); + 
return DBCP.equals(poolingType); + } +} diff --git metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java index 9b3d6d5d7078301254a4cff0a0d8e5de44d03bc3..7c68834ddb5a80bdaf119c2459b7d286b4c5508a 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java @@ -79,13 +79,7 @@ public boolean supports(Configuration configuration) { String poolingType = configuration.get( MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE.varname).toLowerCase(); - if (HIKARI.equals(poolingType)) { - int hikariPropsNr = DataSourceProvider.getPrefixedProperties(configuration, HIKARI).size(); - LOG.debug("Found " + hikariPropsNr + " nr. of hikari specific configurations"); - return hikariPropsNr > 0; - } - LOG.debug("Configuration requested " + poolingType + " pooling, HikariCpDSProvider exiting"); - return false; + return HIKARI.equals(poolingType); } private Properties replacePrefix(Properties props) { diff --git metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java index 1887c052be1e535539cc5ba4c634fa28dfc22f9d..d339126f33b0e65cb14ce9234d79fa7c3b4264c6 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java @@ -19,9 +19,6 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.dbcp.ConnectionFactory; -import org.apache.commons.dbcp.DriverManagerConnectionFactory; -import org.apache.commons.dbcp.PoolableConnectionFactory; import org.apache.commons.lang.NotImplementedException; import org.apache.hadoop.hive.common.ServerUtils; import org.apache.hadoop.hive.common.classification.InterfaceAudience; @@ 
-30,16 +27,13 @@ import org.apache.hadoop.hive.metastore.DatabaseProduct; import org.apache.hadoop.hive.metastore.HouseKeeperService; import org.apache.hadoop.hive.metastore.Warehouse; -import org.apache.hadoop.hive.metastore.datasource.BoneCPDataSourceProvider; import org.apache.hadoop.hive.metastore.datasource.DataSourceProvider; -import org.apache.hadoop.hive.metastore.datasource.HikariCPDataSourceProvider; import org.apache.hadoop.hive.metastore.metrics.Metrics; import org.apache.hadoop.hive.metastore.metrics.MetricsConstants; +import org.apache.hadoop.hive.metastore.datasource.DataSourceProviderFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.commons.dbcp.PoolingDataSource; -import org.apache.commons.pool.impl.GenericObjectPool; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.StringableMap; import org.apache.hadoop.hive.conf.HiveConf; @@ -3193,22 +3187,10 @@ private static synchronized DataSource setupJdbcConnectionPool(HiveConf conf, in String connectionPooler = conf.getVar( HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE).toLowerCase(); - if ("bonecp".equals(connectionPooler)) { - doRetryOnConnPool = true; // Enable retries to work around BONECP bug. 
- return new BoneCPDataSourceProvider().create(conf); - } else if ("dbcp".equals(connectionPooler)) { - GenericObjectPool objectPool = new GenericObjectPool(); - //https://commons.apache.org/proper/commons-pool/api-1.6/org/apache/commons/pool/impl/GenericObjectPool.html#setMaxActive(int) - objectPool.setMaxActive(maxPoolSize); - objectPool.setMaxWait(getConnectionTimeoutMs); - ConnectionFactory connFactory = new DriverManagerConnectionFactory(driverUrl, user, passwd); - // This doesn't get used, but it's still necessary, see - // http://svn.apache.org/viewvc/commons/proper/dbcp/branches/DBCP_1_4_x_BRANCH/doc/ManualPoolingDataSourceExample.java?view=markup - PoolableConnectionFactory poolConnFactory = - new PoolableConnectionFactory(connFactory, objectPool, null, null, false, true); - return new PoolingDataSource(objectPool); - } else if ("hikaricp".equals(connectionPooler)) { - return new HikariCPDataSourceProvider().create(conf); + DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf); + if (dsp != null) { + doRetryOnConnPool = dsp.mayReturnClosedConnection(); + return dsp.create(conf); } else if ("none".equals(connectionPooler)) { LOG.info("Choosing not to pool JDBC connections"); return new NoPoolConnectionPool(conf); diff --git metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java index daea544c7126fad26f02e39a95ea0bc0e4847387..f1548b211f2986d28ac1af1db60fa930b2004db2 100644 --- metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java +++ metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java @@ -19,6 +19,7 @@ import com.jolbox.bonecp.BoneCPDataSource; import com.zaxxer.hikari.HikariDataSource; +import org.apache.commons.dbcp.PoolingDataSource; import org.apache.hadoop.hive.conf.HiveConf; import org.junit.Assert; 
import org.junit.Before; @@ -44,18 +45,28 @@ public void testNoDataSourceCreatedWithoutProps() throws SQLException { DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf); Assert.assertNull(dsp); - conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, BoneCPDataSourceProvider.BONECP); + conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, "dummy"); dsp = DataSourceProviderFactory.getDataSourceProvider(conf); Assert.assertNull(dsp); } @Test + public void testCanCreateDataSourceForSpecificProp() throws SQLException { + + Assert.assertFalse( + DataSourceProviderFactory.hasProviderSpecificConfigurations(conf)); + + conf.set("bonecp.dummy.var", "dummy"); + + Assert.assertTrue( + DataSourceProviderFactory.hasProviderSpecificConfigurations(conf)); + } + + @Test public void testCreateBoneCpDataSource() throws SQLException { conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, BoneCPDataSourceProvider.BONECP); - conf.set(BoneCPDataSourceProvider.BONECP + ".firstProp", "value"); - conf.set(BoneCPDataSourceProvider.BONECP + ".secondProp", "value"); DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf); Assert.assertNotNull(dsp); @@ -164,10 +175,50 @@ public void testSetHikariCpBooleanProperty() throws SQLException { Assert.assertTrue(ds instanceof HikariDataSource); Assert.assertEquals(false, ((HikariDataSource)ds).isAllowPoolSuspension()); } + + @Test + public void testCreateDbCpDataSource() throws SQLException { + + conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, DbCPDataSourceProvider.DBCP); + + DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf); + Assert.assertNotNull(dsp); + + DataSource ds = dsp.create(conf); + Assert.assertTrue(ds instanceof PoolingDataSource); + } + @Test(expected = IllegalArgumentException.class) public void testBoneCPConfigCannotBeSet() { conf.addToRestrictList(BoneCPDataSourceProvider.BONECP); 
conf.verifyAndSet(BoneCPDataSourceProvider.BONECP + ".disableJMX", "true"); } + @Test(expected = IllegalArgumentException.class) + public void testHikariCPConfigCannotBeSet() { + conf.addToRestrictList(HikariCPDataSourceProvider.HIKARI); + conf.verifyAndSet(HikariCPDataSourceProvider.HIKARI + ".allowPoolSuspension", "true"); + } + + @Test(expected = IllegalArgumentException.class) + public void testDbCPConfigCannotBeSet() { + conf.addToRestrictList(DbCPDataSourceProvider.DBCP); + conf.verifyAndSet(DbCPDataSourceProvider.DBCP + ".booleanProp", "true"); + } + + @Test + public void testHasProviderSpecificConfiguration() throws SQLException { + + Assert.assertFalse(DataSourceProviderFactory.hasProviderSpecificConfigurations(conf)); + + conf.set(HikariCPDataSourceProvider.HIKARI + ".dummyConf", "dummyValue"); + Assert.assertTrue(DataSourceProviderFactory.hasProviderSpecificConfigurations(conf)); + + conf.set(DbCPDataSourceProvider.DBCP + ".dummyConf", "dummyValue"); + Assert.assertTrue(DataSourceProviderFactory.hasProviderSpecificConfigurations(conf)); + + conf.set(BoneCPDataSourceProvider.BONECP + ".dummyConf", "dummyValue"); + Assert.assertTrue(DataSourceProviderFactory.hasProviderSpecificConfigurations(conf)); + } + }