diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index f1b58c5..9458bbf 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -824,7 +824,7 @@ private static void logInfo(String m) { private String startFunction(String function, String extraLogInfo) { incrementCounter(function); - logInfo((getThreadLocalIpAddress() == null ? "" : "source:" + getThreadLocalIpAddress() + " ") + + logInfo("START: " + (getThreadLocalIpAddress() == null ? "" : "source:" + getThreadLocalIpAddress() + " ") + function + extraLogInfo); com.codahale.metrics.Timer timer = Metrics.getOrCreateTimer(MetricsConstants.API_PREFIX + function); @@ -879,6 +879,10 @@ private void endFunction(String function, MetaStoreEndFunctionContext context) { for (MetaStoreEndFunctionListener listener : endFunctionListeners) { listener.onEndFunction(function, context); } + + logInfo("END: " + (getThreadLocalIpAddress() == null ? 
"" : "source:" + getThreadLocalIpAddress() + " ") + + function + " tbl=" + context.getInputTableName() + " success=" + context.isSuccess() + " exceptionMsg=" + + (context.getException() == null ? "null" : context.getException().getMessage())); } @Override @@ -1628,6 +1632,9 @@ public void create_table_with_environment_context(final Table tbl, } finally { endFunction("create_table", success, ex, tbl.getTableName()); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -1660,6 +1667,9 @@ public void create_table_with_constraints(final Table tbl, } finally { endFunction("create_table", success, ex, tbl.getTableName()); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -1705,6 +1715,9 @@ public void drop_constraint(DropConstraintRequest req) } endFunction("drop_constraint", success, ex, constraintName); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -1757,6 +1770,9 @@ public void add_primary_key(AddPrimaryKeyRequest req) } endFunction("add_primary_key", success, ex, constraintName); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -1809,6 +1825,9 @@ public void add_foreign_key(AddForeignKeyRequest req) } endFunction("add_foreign_key", success, ex, constraintName); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -1861,6 +1880,9 @@ public void add_unique_constraint(AddUniqueConstraintRequest req) } endFunction("add_unique_constraint", success, ex, constraintName); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -1913,6 +1935,9 @@ public void add_not_null_constraint(AddNotNullConstraintRequest req) } endFunction("add_not_null_constraint", success, ex, constraintName); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } private boolean is_table_exists(RawStore ms, String dbname, String name) @@ -2175,7 +2200,9 @@ 
public void drop_table_with_environment_context(final String dbname, final Strin } finally { endFunction("drop_table", success, ex, name); } - + if (!success) { + throw new MetaException("Commit transaction failed"); + } } private void updateStatsForTruncate(Map props, EnvironmentContext environmentContext) { @@ -4298,6 +4325,9 @@ public void alter_index(final String dbname, final String base_table_name, transactionalListenerResponses, ms); } } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override @@ -4404,6 +4434,9 @@ private void alter_table_core(final String dbname, final String name, final Tabl } finally { endFunction("alter_table", success, ex, name); } + if (!success) { + throw new MetaException("Commit transaction failed"); + } } @Override diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index 2e43dc8..e6dae82 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -349,6 +349,9 @@ public static ConfVars getMetaConf(String name) { CONNECTION_DRIVER("javax.jdo.option.ConnectionDriverName", "javax.jdo.option.ConnectionDriverName", "org.apache.derby.jdbc.EmbeddedDriver", "Driver class name for a JDBC metastore"), + CONNECTION_POOLING_CONNECTION_TIMEOUT("datanucleus.connectionPool.connectionTimeout", + "datanucleus.connectionPool.connectionTimeout", 30, TimeUnit.SECONDS, + "When connection pool is exhausted, wait until this time before throwing exception (default: 30s)"), CONNECTION_POOLING_MAX_CONNECTIONS("datanucleus.connectionPool.maxPoolSize", "datanucleus.connectionPool.maxPoolSize", 10, "Specify the maximum number of connections in the connection pool. 
Note: The configured size will be used by\n" + diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java index 4ff2bb7..04e6977 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/BoneCPDataSourceProvider.java @@ -27,6 +27,7 @@ import javax.sql.DataSource; import java.sql.SQLException; import java.util.Properties; +import java.util.concurrent.TimeUnit; /** * DataSourceProvider for the BoneCP connection pool. @@ -51,7 +52,8 @@ public DataSource create(Configuration hdpConfig) throws SQLException { MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS); Properties properties = DataSourceProvider.getPrefixedProperties(hdpConfig, BONECP); - long connectionTimeout = hdpConfig.getLong(CONNECTION_TIMEOUT_PROPERTY, 30000L); + long connectionTimeout = MetastoreConf.getTimeVar(hdpConfig, + MetastoreConf.ConfVars.CONNECTION_POOLING_CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS); String partitionCount = properties.getProperty(PARTITION_COUNT_PROPERTY, "1"); BoneCPConfig config = null; diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java index 6ffc24a..237f509 100644 --- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java +++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java @@ -27,6 +27,7 @@ import javax.sql.DataSource; import java.sql.SQLException; import java.util.Properties; +import java.util.concurrent.TimeUnit; /** * DataSourceProvider for the 
HikariCP connection pool. @@ -36,7 +37,6 @@ private static final Logger LOG = LoggerFactory.getLogger(HikariCPDataSourceProvider.class); public static final String HIKARI = "hikari"; - private static final String CONNECTION_TIMEOUT_PROPERTY= "hikari.connectionTimeout"; @Override public DataSource create(Configuration hdpConfig) throws SQLException { @@ -51,7 +51,8 @@ public DataSource create(Configuration hdpConfig) throws SQLException { Properties properties = replacePrefix( DataSourceProvider.getPrefixedProperties(hdpConfig, HIKARI)); - long connectionTimeout = hdpConfig.getLong(CONNECTION_TIMEOUT_PROPERTY, 30000L); + long connectionTimeout = MetastoreConf.getTimeVar(hdpConfig, + MetastoreConf.ConfVars.CONNECTION_POOLING_CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS); HikariConfig config = null; try { config = new HikariConfig(properties); @@ -64,6 +65,9 @@ public DataSource create(Configuration hdpConfig) throws SQLException { config.setPassword(passwd); //https://github.com/brettwooldridge/HikariCP config.setConnectionTimeout(connectionTimeout); + if (LOG.isDebugEnabled()) { + config.setLeakDetectionThreshold(connectionTimeout); + } return new HikariDataSource(config); }