Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
===================================================================
--- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java	(revision 1041295)
+++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java	(working copy)
@@ -23,6 +23,7 @@
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -35,6 +36,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.common.metrics.Metrics;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
@@ -169,6 +171,15 @@
       updateConnectionURL(hiveConf, null);
 
       createDefaultDB();
+
+      if (hiveConf.getBoolean("hive.metastore.metrics.enabled", false)) {
+        try {
+          Metrics.init();
+        } catch (Exception e) {
+          // log the exception, but ignore any inability to start the metrics system
+          LOG.error("Exception while instantiating metrics system: " + e);
+        }
+      }
       return true;
     }
@@ -415,6 +426,39 @@
           + "[" + join(partVals, ",") + "]");
     }
 
+    public String startFunction(String function, String extraLogInfo) {
+      incrementCounter(function);
+      logStartFunction(function + extraLogInfo);
+      try {
+        Metrics.startScope(function);
+      } catch (IOException e) {
+        LOG.debug("Exception when starting metrics scope" + e);
+      }
+      return function;
+    }
+
+    public String startFunction(String function) {
+      return startFunction(function, "");
+    }
+
+    public String startTableFunction(String function, String db, String tbl) {
+      return startFunction(function, " : db=" + db + " tbl=" + tbl);
+    }
+
+    public String startPartitionFunction(String function, String db, String tbl,
+        List<String> partVals) {
+      return startFunction(function, " : db=" + db + " tbl=" + tbl
+          + "[" + join(partVals, ",") + "]");
+    }
+
+    public void endFunction(String function) {
+      try {
+        Metrics.endScope(function);
+      } catch (IOException e) {
+        LOG.debug("Exception when closing metrics scope" + e);
+      }
+    }
+
     @Override
     public int getStatus() {
       return fb_status.ALIVE;
@@ -454,8 +498,7 @@
 
     public void create_database(final Database db)
         throws AlreadyExistsException, InvalidObjectException, MetaException {
-      incrementCounter("create_database");
-      logStartFunction("create_database: "
+      startFunction("create_database", ": "
           + db.getName() + " "
           + db.getLocationUri() + " "
           + db.getDescription());
@@ -483,14 +526,14 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("create_database");
       }
     }
 
     public Database get_database(final String name)
         throws NoSuchObjectException, MetaException {
-      incrementCounter("get_database");
-      logStartFunction("get_database: " + name);
-
+      startFunction("get_database", ": " + name);
       Database db = null;
       try {
         db = executeWithRetry(new Command<Database>() {
@@ -506,6 +549,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_database");
       }
       return db;
     }
@@ -536,9 +581,10 @@
 
     public void drop_database(final String dbName, final boolean deleteData)
         throws NoSuchObjectException, InvalidOperationException, MetaException {
-      incrementCounter("drop_database");
logStartFunction("drop_database: " + dbName); + + startFunction("drop_database",": " + dbName); if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName)) { + endFunction("drop_database"); throw new MetaException("Can not drop default database"); } @@ -559,12 +605,13 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_database"); } } public List get_databases(final String pattern) throws MetaException { - incrementCounter("get_databases"); - logStartFunction("get_databases: " + pattern); + startFunction("get_databases",": " + pattern); List ret = null; try { @@ -579,13 +626,14 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_databases"); } return ret; } public List get_all_databases() throws MetaException { - incrementCounter("get_all_databases"); - logStartFunction("get_all_databases"); + startFunction("get_all_databases"); List ret = null; try { @@ -600,6 +648,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_all_databases"); } return ret; } @@ -627,8 +677,7 @@ public boolean create_type(final Type type) throws AlreadyExistsException, MetaException, InvalidObjectException { - incrementCounter("create_type"); - logStartFunction("create_type: " + type.getName()); + startFunction("create_type",": " + type.getName()); Boolean ret = null; try { ret = executeWithRetry(new Command() { @@ -647,14 +696,15 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("create_type"); } return ret.booleanValue(); } public Type get_type(final String name) throws MetaException, NoSuchObjectException { - incrementCounter("get_type"); - logStartFunction("get_type: " + name); + startFunction("get_type",": " + name); Type ret; try { @@ -675,6 +725,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_type"); } return ret; } @@ -706,8 +758,7 @@ public boolean drop_type(final String name) throws MetaException { - incrementCounter("drop_type"); - logStartFunction("drop_type: " + name); + startFunction("drop_type",": " + name); Boolean ret = null; try { @@ -723,14 +774,16 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_type"); } return ret; } public Map get_type_all(String name) throws MetaException { - incrementCounter("get_type_all"); // TODO Auto-generated method stub - logStartFunction("get_type_all: " + name); + startFunction("get_type_all",": " + name); + endFunction("get_type_all"); throw new MetaException("Not yet implemented"); } @@ -803,8 +856,7 @@ public void create_table(final Table tbl) throws AlreadyExistsException, MetaException, InvalidObjectException { - incrementCounter("create_table"); - logStartFunction("create_table: db=" + tbl.getDbName() + " tbl=" + startFunction("create_table",": db=" + tbl.getDbName() + " tbl=" + tbl.getTableName()); try { executeWithRetry(new Command() { @@ -823,6 +875,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("create_table"); } } @@ -893,8 +947,7 @@ public void drop_table(final String dbname, final String name, final boolean deleteData) throws NoSuchObjectException, MetaException { - incrementCounter("drop_table"); - logStartTableFunction("drop_table", dbname, 
+      startTableFunction("drop_table", dbname, name);
 
       try {
         executeWithRetry(new Command<Boolean>() {
@@ -911,6 +964,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("drop_table");
       }
     }
 
@@ -933,8 +988,7 @@
 
     public Table get_table(final String dbname, final String name)
         throws MetaException, NoSuchObjectException {
       Table t = null;
-      incrementCounter("get_table");
-      logStartTableFunction("get_table", dbname, name);
+      startTableFunction("get_table", dbname, name);
       try {
         t = executeWithRetry(new Command<Table>() {
           @Override
@@ -954,14 +1008,15 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_table");
       }
       return t;
     }
 
     public boolean set_table_parameters(String dbname, String name,
         Map<String, String> params) throws NoSuchObjectException, MetaException {
-      incrementCounter("set_table_parameters");
-      logStartTableFunction("set_table_parameters", dbname, name);
+      endFunction(startTableFunction("set_table_parameters", dbname, name));
       // TODO Auto-generated method stub
       return false;
     }
@@ -1034,8 +1089,7 @@
 
     public Partition append_partition(final String dbName, final String tableName,
         final List<String> part_vals) throws InvalidObjectException,
         AlreadyExistsException, MetaException {
-      incrementCounter("append_partition");
-      logStartPartitionFunction("append_partition", dbName, tableName, part_vals);
+      startPartitionFunction("append_partition", dbName, tableName, part_vals);
       if (LOG.isDebugEnabled()) {
         for (String part : part_vals) {
           LOG.debug(part);
@@ -1059,6 +1113,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("append_partition");
       }
       return ret;
     }
@@ -1086,7 +1142,7 @@
 
     public int add_partitions(final List<Partition> parts) throws MetaException,
         InvalidObjectException, AlreadyExistsException {
-      incrementCounter("add_partition");
+      startFunction("add_partition");
       if (parts.size() == 0) {
+        endFunction("add_partition");
         return 0;
       }
@@ -1109,6 +1165,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("add_partition");
       }
       return ret;
     }
@@ -1181,8 +1239,7 @@
 
     public Partition add_partition(final Partition part)
         throws InvalidObjectException, AlreadyExistsException, MetaException {
-      incrementCounter("add_partition");
-      logStartTableFunction("add_partition", part.getDbName(), part.getTableName());
+      startTableFunction("add_partition", part.getDbName(), part.getTableName());
 
       Partition ret = null;
       try {
         ret = executeWithRetry(new Command<Partition>() {
@@ -1201,6 +1258,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("add_partition");
       }
       return ret;
 
@@ -1262,8 +1321,7 @@
 
     public boolean drop_partition(final String db_name, final String tbl_name,
         final List<String> part_vals, final boolean deleteData)
         throws NoSuchObjectException, MetaException, TException {
-      incrementCounter("drop_partition");
-      logStartPartitionFunction("drop_partition", db_name, tbl_name, part_vals);
+      startPartitionFunction("drop_partition", db_name, tbl_name, part_vals);
       LOG.info("Partition values:" + part_vals);
 
       Boolean ret = null;
       try {
@@ -1284,6 +1342,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("drop_partition");
       }
       return ret.booleanValue();
     }
 
     public Partition get_partition(final String db_name, final String tbl_name,
         final List<String> part_vals) throws MetaException, NoSuchObjectException {
incrementCounter("get_partition"); - logStartPartitionFunction("get_partition", db_name, tbl_name, part_vals); + startPartitionFunction("get_partition", db_name, tbl_name, part_vals); Partition ret = null; try { @@ -1309,14 +1368,15 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_partition"); } return ret; } public List get_partitions(final String db_name, final String tbl_name, final short max_parts) throws NoSuchObjectException, MetaException { - incrementCounter("get_partitions"); - logStartTableFunction("get_partitions", db_name, tbl_name); + startTableFunction("get_partitions", db_name, tbl_name); List ret = null; try { @@ -1333,6 +1393,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_partitions"); } return ret; @@ -1340,8 +1402,7 @@ public List get_partition_names(final String db_name, final String tbl_name, final short max_parts) throws MetaException { - incrementCounter("get_partition_names"); - logStartTableFunction("get_partition_names", db_name, tbl_name); + startTableFunction("get_partition_names", db_name, tbl_name); List ret = null; try { @@ -1356,6 +1417,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_partition_names"); } return ret; } @@ -1380,8 +1443,7 @@ public void alter_partition(final String db_name, final String tbl_name, final Partition new_part) throws InvalidOperationException, MetaException, TException { - incrementCounter("alter_partition"); - logStartTableFunction("alter_partition", db_name, tbl_name); + startTableFunction("alter_partition", db_name, tbl_name); LOG.info("Partition values:" + new_part.getValues()); try { @@ -1401,21 +1463,22 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("alter_partition"); } return; } public boolean create_index(Index index_def) throws IndexAlreadyExistsException, MetaException { - incrementCounter("create_index"); + endFunction(startFunction("create_index")); // TODO Auto-generated method stub throw new MetaException("Not yet implemented"); } public void alter_index(final String dbname, final String base_table_name, final String index_name, final Index newIndex) throws InvalidOperationException, MetaException { - incrementCounter("alter_index"); - logStartFunction("alter_index: db=" + dbname + " base_tbl=" + base_table_name + startFunction("alter_index", ": db=" + dbname + " base_tbl=" + base_table_name + " idx=" + index_name + " newidx=" + newIndex.getIndexName()); newIndex.putToParameters(Constants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); @@ -1435,20 +1498,20 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("alter_index"); } return; } public String getVersion() throws TException { - incrementCounter("getVersion"); - logStartFunction("getVersion"); + endFunction(startFunction("getVersion")); return "3.0"; } public void alter_table(final String dbname, final String name, final Table newTable) throws InvalidOperationException, MetaException { - incrementCounter("alter_table"); - logStartFunction("alter_table: db=" + dbname + " tbl=" + name + startFunction("alter_table",": db=" + dbname + " tbl=" + name + " newtbl=" + newTable.getTableName()); // Update the time if it hasn't been specified. 
@@ -1458,6 +1521,7 @@
             .currentTimeMillis() / 1000));
       }
 
+
       try {
         executeWithRetry(new Command<Boolean>() {
           @Override
@@ -1473,13 +1537,14 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("alter_table");
       }
     }
 
     public List<String> get_tables(final String dbname, final String pattern)
         throws MetaException {
-      incrementCounter("get_tables");
-      logStartFunction("get_tables: db=" + dbname + " pat=" + pattern);
+      startFunction("get_tables", ": db=" + dbname + " pat=" + pattern);
 
       List<String> ret;
       try {
@@ -1494,13 +1559,14 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_tables");
       }
       return ret;
     }
 
     public List<String> get_all_tables(final String dbname) throws MetaException {
-      incrementCounter("get_all_tables");
-      logStartFunction("get_all_tables: db=" + dbname);
+      startFunction("get_all_tables", ": db=" + dbname);
 
       List<String> ret;
       try {
@@ -1515,35 +1581,40 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_all_tables");
       }
       return ret;
     }
 
     public List<FieldSchema> get_fields(String db, String tableName)
         throws MetaException, UnknownTableException, UnknownDBException {
-      incrementCounter("get_fields");
-      logStartFunction("get_fields: db=" + db + "tbl=" + tableName);
+      startFunction("get_fields", ": db=" + db + "tbl=" + tableName);
       String[] names = tableName.split("\\.");
       String base_table_name = names[0];
 
       Table tbl;
       try {
-        tbl = get_table(db, base_table_name);
-      } catch (NoSuchObjectException e) {
-        throw new UnknownTableException(e.getMessage());
-      }
-      boolean getColsFromSerDe = SerDeUtils.shouldGetColsFromSerDe(
-          tbl.getSd().getSerdeInfo().getSerializationLib());
-      if (!getColsFromSerDe) {
-        return tbl.getSd().getCols();
-      } else {
         try {
-          Deserializer s = MetaStoreUtils.getDeserializer(hiveConf, tbl);
-          return MetaStoreUtils.getFieldsFromDeserializer(tableName, s);
-        } catch (SerDeException e) {
-          StringUtils.stringifyException(e);
-          throw new MetaException(e.getMessage());
+          tbl = get_table(db, base_table_name);
+        } catch (NoSuchObjectException e) {
+          throw new UnknownTableException(e.getMessage());
         }
+        boolean getColsFromSerDe = SerDeUtils.shouldGetColsFromSerDe(
+            tbl.getSd().getSerdeInfo().getSerializationLib());
+        if (!getColsFromSerDe) {
+          return tbl.getSd().getCols();
+        } else {
+          try {
+            Deserializer s = MetaStoreUtils.getDeserializer(hiveConf, tbl);
+            return MetaStoreUtils.getFieldsFromDeserializer(tableName, s);
+          } catch (SerDeException e) {
+            StringUtils.stringifyException(e);
+            throw new MetaException(e.getMessage());
+          }
+        }
+      } finally {
+        endFunction("get_fields");
       }
     }
 
@@ -1562,29 +1633,32 @@
      */
     public List<FieldSchema> get_schema(String db, String tableName)
         throws MetaException, UnknownTableException, UnknownDBException {
-      incrementCounter("get_schema");
-      logStartFunction("get_schema: db=" + db + "tbl=" + tableName);
-      String[] names = tableName.split("\\.");
-      String base_table_name = names[0];
-
-      Table tbl;
+      startFunction("get_schema", ": db=" + db + "tbl=" + tableName);
       try {
-        tbl = get_table(db, base_table_name);
-      } catch (NoSuchObjectException e) {
-        throw new UnknownTableException(e.getMessage());
-      }
-      List<FieldSchema> fieldSchemas = get_fields(db, base_table_name);
+        String[] names = tableName.split("\\.");
+        String base_table_name = names[0];
 
-      if (tbl == null || fieldSchemas == null) {
-        throw new UnknownTableException(tableName + " doesn't exist");
-      }
+        Table tbl;
+        try {
+          tbl = get_table(db, base_table_name);
+        } catch (NoSuchObjectException e) {
+          throw new UnknownTableException(e.getMessage());
+        }
+        List<FieldSchema> fieldSchemas = get_fields(db, base_table_name);
 
-      if (tbl.getPartitionKeys() != null) {
-        // Combine the column field schemas and the partition keys to create the
-        // whole schema
-        fieldSchemas.addAll(tbl.getPartitionKeys());
+        if (tbl == null || fieldSchemas == null) {
+          throw new UnknownTableException(tableName + " doesn't exist");
+        }
+
+        if (tbl.getPartitionKeys() != null) {
+          // Combine the column field schemas and the partition keys to create the
+          // whole schema
+          fieldSchemas.addAll(tbl.getPartitionKeys());
+        }
+        return fieldSchemas;
+      } finally {
+        endFunction("get_schema");
       }
-      return fieldSchemas;
     }
 
     public String getCpuProfile(int profileDurationInSec) throws TException {
@@ -1599,28 +1673,31 @@
      */
     public String get_config_value(String name, String defaultValue)
         throws TException, ConfigValSecurityException {
-      incrementCounter("get_config_value");
-      logStartFunction("get_config_value: name=" + name + " defaultValue="
+      startFunction("get_config_value", ": name=" + name + " defaultValue="
           + defaultValue);
-      if (name == null) {
-        return defaultValue;
-      }
-      // Allow only keys that start with hive.*, hdfs.*, mapred.* for security
-      // i.e. don't allow access to db password
-      if (!Pattern.matches("(hive|hdfs|mapred).*", name)) {
-        throw new ConfigValSecurityException("For security reasons, the "
-            + "config key " + name + " cannot be accessed");
-      }
+      try {
+        if (name == null) {
+          return defaultValue;
+        }
+        // Allow only keys that start with hive.*, hdfs.*, mapred.* for security
+        // i.e. don't allow access to db password
+        if (!Pattern.matches("(hive|hdfs|mapred).*", name)) {
+          throw new ConfigValSecurityException("For security reasons, the "
+              + "config key " + name + " cannot be accessed");
+        }
 
-      String toReturn = defaultValue;
-      try {
-        toReturn = hiveConf.get(name, defaultValue);
-      } catch (RuntimeException e) {
-        LOG.error(threadLocalId.get().toString() + ": "
-            + "RuntimeException thrown in get_config_value - msg: "
-            + e.getMessage() + " cause: " + e.getCause());
+        String toReturn = defaultValue;
+        try {
+          toReturn = hiveConf.get(name, defaultValue);
+        } catch (RuntimeException e) {
+          LOG.error(threadLocalId.get().toString() + ": "
+              + "RuntimeException thrown in get_config_value - msg: "
+              + e.getMessage() + " cause: " + e.getCause());
+        }
+        return toReturn;
+      } finally {
+        endFunction("get_config_value");
       }
-      return toReturn;
     }
 
     private List<String> getPartValsFromName(RawStore ms, String dbName, String tblName,
@@ -1669,8 +1746,7 @@
 
     public Partition get_partition_by_name(final String db_name, final String tbl_name,
         final String part_name) throws MetaException, NoSuchObjectException, TException {
-      incrementCounter("get_partition_by_name");
-      logStartFunction("get_partition_by_name: db=" + db_name + " tbl="
+      startFunction("get_partition_by_name", ": db=" + db_name + " tbl="
           + tbl_name + " part=" + part_name);
 
       Partition ret = null;
       try {
@@ -1691,6 +1767,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_partition_by_name");
       }
       return ret;
     }
@@ -1698,8 +1776,7 @@
 
     public Partition append_partition_by_name(final String db_name, final String tbl_name,
         final String part_name) throws InvalidObjectException,
         AlreadyExistsException, MetaException, TException {
-      incrementCounter("append_partition_by_name");
-      logStartFunction("append_partition_by_name: db=" + db_name + " tbl="
db_name + " tbl=" + tbl_name + " part=" + part_name); Partition ret = null; @@ -1722,6 +1799,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("append_partition_by_name"); } return ret; } @@ -1745,8 +1824,7 @@ public boolean drop_partition_by_name(final String db_name, final String tbl_name, final String part_name, final boolean deleteData) throws NoSuchObjectException, MetaException, TException { - incrementCounter("drop_partition_by_name"); - logStartFunction("drop_partition_by_name: db=" + db_name + " tbl=" + startFunction("drop_partition_by_name",": db=" + db_name + " tbl=" + tbl_name + " part=" + part_name); Boolean ret = null; @@ -1767,6 +1845,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_partition_by_name"); } return ret.booleanValue(); @@ -1776,93 +1856,98 @@ public List get_partitions_ps(final String db_name, final String tbl_name, final List part_vals, final short max_parts) throws MetaException, TException { - incrementCounter("get_partitions_ps"); - logStartPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals); - - Table t; + startPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals); try { - t = get_table(db_name, tbl_name); - } catch (NoSuchObjectException e) { - throw new MetaException(e.getMessage()); - } + Table t; + try { + t = get_table(db_name, tbl_name); + } catch (NoSuchObjectException e) { + throw new MetaException(e.getMessage()); + } - if (part_vals.size() > t.getPartitionKeys().size()) { - throw new MetaException("Incorrect number of partition values"); - } - // Create a map from the partition column name to the partition value - Map partKeyToValues = new LinkedHashMap(); - int i=0; - for (String value : part_vals) { - String col = t.getPartitionKeys().get(i).getName(); - if (value.length() > 0) { - partKeyToValues.put(col, value); + if (part_vals.size() > t.getPartitionKeys().size()) { + throw new MetaException("Incorrect number of partition values"); } - i++; - } - final String filter = MetaStoreUtils.makeFilterStringFromMap(partKeyToValues); + // Create a map from the partition column name to the partition value + Map partKeyToValues = new LinkedHashMap(); + int i=0; + for (String value : part_vals) { + String col = t.getPartitionKeys().get(i).getName(); + if (value.length() > 0) { + partKeyToValues.put(col, value); + } + i++; + } + final String filter = MetaStoreUtils.makeFilterStringFromMap(partKeyToValues); - List ret = null; - try { - ret = executeWithRetry(new Command>() { - @Override - List run(RawStore ms) throws Exception { - return ms.getPartitionsByFilter(db_name, tbl_name, filter, max_parts); - } - }); - } catch (MetaException e) { - throw e; - } catch (Exception e) { - assert(e instanceof RuntimeException); - throw (RuntimeException)e; + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + return ms.getPartitionsByFilter(db_name, tbl_name, filter, max_parts); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + assert(e instanceof RuntimeException); + throw (RuntimeException)e; + } + return ret; } - - return ret; + finally { + endFunction("get_partitions_ps"); + } } @Override public List get_partition_names_ps(final String db_name, final String tbl_name, final List part_vals, final short max_parts) throws MetaException, TException { - 
incrementCounter("get_partition_names_ps"); - logStartPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals); - Table t; + startPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals); try { - t = get_table(db_name, tbl_name); - } catch (NoSuchObjectException e) { - throw new MetaException(e.getMessage()); - } + Table t; + try { + t = get_table(db_name, tbl_name); + } catch (NoSuchObjectException e) { + throw new MetaException(e.getMessage()); + } - if (part_vals.size() > t.getPartitionKeys().size()) { - throw new MetaException("Incorrect number of partition values"); - } - // Create a map from the partition column name to the partition value - Map partKeyToValues = new LinkedHashMap(); - int i=0; - for (String value : part_vals) { - String col = t.getPartitionKeys().get(i).getName(); - if (value.length() > 0) { - partKeyToValues.put(col, value); + if (part_vals.size() > t.getPartitionKeys().size()) { + throw new MetaException("Incorrect number of partition values"); } - i++; - } - final String filter = MetaStoreUtils.makeFilterStringFromMap(partKeyToValues); + // Create a map from the partition column name to the partition value + Map partKeyToValues = new LinkedHashMap(); + int i=0; + for (String value : part_vals) { + String col = t.getPartitionKeys().get(i).getName(); + if (value.length() > 0) { + partKeyToValues.put(col, value); + } + i++; + } + final String filter = MetaStoreUtils.makeFilterStringFromMap(partKeyToValues); - List ret = null; - try { - ret = executeWithRetry(new Command>() { - @Override - List run(RawStore ms) throws Exception { - return ms.listPartitionNamesByFilter(db_name, tbl_name, filter, max_parts); - } - }); - } catch (MetaException e) { - throw e; - } catch (Exception e) { - assert(e instanceof RuntimeException); - throw (RuntimeException)e; + List ret = null; + try { + ret = executeWithRetry(new Command>() { + @Override + List run(RawStore ms) throws Exception { + return ms.listPartitionNamesByFilter(db_name, tbl_name, filter, max_parts); + } + }); + } catch (MetaException e) { + throw e; + } catch (Exception e) { + assert(e instanceof RuntimeException); + throw (RuntimeException)e; + } + + return ret; + } finally { + endFunction("get_partitions_names_ps"); } - - return ret; } @Override @@ -1889,8 +1974,7 @@ @Override public Index add_index(final Index newIndex, final Table indexTable) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { - incrementCounter("add_partition"); - logStartFunction("add_index: db=" + newIndex.getDbName() + " tbl=" + startFunction("add_index",": db=" + newIndex.getDbName() + " tbl=" + newIndex.getOrigTableName() + " index=" + newIndex.getIndexName()); Index ret = null; try { @@ -1909,6 +1993,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("add_index"); } return ret; } @@ -1974,8 +2060,7 @@ public boolean drop_index_by_name(final String dbName, final String tblName, final String indexName, final boolean deleteData) throws NoSuchObjectException, MetaException, TException { - incrementCounter("drop_index_by_name"); - logStartFunction("drop_index_by_name: db=" + dbName + " tbl=" + startFunction("drop_index_by_name",": db=" + dbName + " tbl=" + tblName + " index=" + indexName); Boolean ret = null; @@ -1996,6 +2081,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_index_by_name"); } return ret.booleanValue(); @@ -2052,8 
@@ -2052,8 +2139,7 @@
         final String indexName) throws MetaException, NoSuchObjectException,
         TException {
-      incrementCounter("get_index_by_name");
-      logStartFunction("get_index_by_name: db=" + dbName + " tbl="
+      startFunction("get_index_by_name", ": db=" + dbName + " tbl="
          + tblName + " index=" + indexName);
 
       Index ret = null;
       try {
@@ -2074,6 +2160,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_index_by_name");
       }
       return ret;
     }
@@ -2093,8 +2181,7 @@
 
     @Override
     public List<String> get_index_names(final String dbName, final String tblName,
         final short maxIndexes) throws MetaException, TException {
-      incrementCounter("get_index_names");
-      logStartTableFunction("get_index_names", dbName, tblName);
+      startTableFunction("get_index_names", dbName, tblName);
 
       List<String> ret = null;
       try {
@@ -2109,6 +2196,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_index_names");
       }
       return ret;
     }
@@ -2117,8 +2206,7 @@
 
     @Override
     public List<Index> get_indexes(final String dbName, final String tblName,
         final short maxIndexes) throws NoSuchObjectException, MetaException, TException {
-      incrementCounter("get_indexes");
-      logStartTableFunction("get_indexes", dbName, tblName);
+      startTableFunction("get_indexes", dbName, tblName);
 
       List<Index> ret = null;
       try {
@@ -2133,6 +2221,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_indexes");
       }
       return ret;
     }
@@ -2141,8 +2231,7 @@
 
    @Override
     public List<Partition> get_partitions_by_filter(final String dbName, final String tblName,
         final String filter, final short maxParts)
         throws MetaException, NoSuchObjectException, TException {
-      incrementCounter("get_partitions_by_filter");
-      logStartTableFunction("get_partitions_by_filter", dbName, tblName);
+      startTableFunction("get_partitions_by_filter", dbName, tblName);
 
       List<Partition> ret = null;
       try {
@@ -2159,6 +2248,8 @@
       } catch (Exception e) {
         assert(e instanceof RuntimeException);
         throw (RuntimeException)e;
+      } finally {
+        endFunction("get_partitions_by_filter");
       }
       return ret;
     }
Index: common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java	(revision 0)
+++ common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java	(revision 0)
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.metrics;
+
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.management.MBeanServer;
+import javax.management.ObjectName;
+
+/**
+ * Metrics Subsystem - allows exposure of a number of named parameters/counters
+ * via jmx, intended to be used as a static subsystem.
+ *
+ * It has a few primary ways it can be used:
+ * (i) Using the set and get methods to set and get named parameters
+ * (ii) Using the incrementCounter method to increment and set named
+ * parameters in one shot, rather than having to make a get and then a set.
+ * (iii) Using the startScope and endScope methods to start and end
+ * named "scopes" that record the number of times they've been instantiated
+ * and the amount of time (in milliseconds) spent inside the scopes.
+ */
+public class Metrics {
+
+  /**
+   * MetricsScope : A class that encapsulates an idea of a metered scope.
+   * Instantiating a named scope and then closing it exposes two counters:
+   * (i) a "number of calls" counter ( <name>.n ), and
+   * (ii) a "total number of milliseconds spent between scope open and close" counter. ( <name>.t )
+   */
+  public class MetricsScope {
+
+    String name = null;
+    boolean isOpen = false;
+    Long startTime = null;
+    String numCounter = null;
+    String timeCounter = null;
+
+    // disable the default ctor - so that a scope can't be created without a name
+    @SuppressWarnings("unused")
+    private MetricsScope() { }
+
+    /**
+     * Instantiates a named scope - intended to only be called by Metrics, so locally scoped.
+     * @param name - name of the variable
+     * @throws IOException
+     */
+    MetricsScope(String name) throws IOException {
+      this.name = name;
+      Metrics.incrementCounter(name + ".n");
+      isOpen = true;
+      startTime = System.currentTimeMillis();
+    }
+
+    /**
+     * Closes scope, and records the time taken
+     * @throws IOException
+     */
+    public void close() throws IOException {
+      if (isOpen) {
+        Long endTime = System.currentTimeMillis();
+        Metrics.incrementCounter(name + ".t", endTime - startTime);
+      }
+      isOpen = false;
+    }
+
+//    /*
+//     * Yes, never rely on finalize, but we need to record this if so.
+//     * Maybe we should consider recording the time taken to a different variable?
+// */ +// @Override +// protected void finalize() throws Throwable{ +// close(); +// super.finalize(); +// } + } + + + static MetricsMBean metrics = new MetricsMBeanImpl(); + static Map scopes = new HashMap(); + static boolean initialized = false; + + static Metrics m = new Metrics(); + + public static void init() throws Exception { + if (!initialized){ + MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); + ObjectName oname = new ObjectName("org.apache.hadoop.hive.common.metrics:type=MetricsMBean"); + mbs.registerMBean(metrics, oname); + initialized = true; + } + } + + public static void incrementCounter(String name) throws IOException{ + if (!initialized){ return; } + incrementCounter(name,Long.valueOf(1)); + } + + public static void incrementCounter(String name, long increment) throws IOException{ + if (!initialized){ return; } + if (!metrics.hasKey(name)){ + set(name,Long.valueOf(1)); + }else{ + set(name, ((Long)get(name))+increment); + } + } + + public static void set(String name, Object value) throws IOException{ + if (!initialized){ return; } + metrics.put(name,value); + } + + public static Object get(String name) throws IOException{ + if (!initialized){ return null; } + return metrics.get(name); + } + + public static MetricsScope startScope(String name) throws IOException{ + if (!initialized){ return null; } + if (scopes.containsKey(name)){ + scopes.get(name).close(); + } + scopes.put(name, m.new MetricsScope(name)); + return scopes.get(name); + } + + public static MetricsScope getScope(String name) throws IOException { + if (!initialized) { return null; } + if (scopes.containsKey(name)){ + return scopes.get(name); + }else{ + throw new IOException("No metrics scope named "+name); + } + } + + public static void endScope(String name) throws IOException{ + if (!initialized){ return; } + if (scopes.containsKey(name)){ + scopes.get(name).close(); + scopes.remove(name); + } + } + +} Index: common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java =================================================================== --- common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java (revision 0) +++ common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java (revision 0) @@ -0,0 +1,51 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.apache.hadoop.hive.common.metrics;
+
+import java.io.IOException;
+
+import javax.management.DynamicMBean;
+
+/**
+ * MBean definition for metrics tracking from jmx
+ */
+public interface MetricsMBean extends DynamicMBean {
+
+  /**
+   * Check if we're tracking a certain named key/metric
+   */
+  public abstract boolean hasKey(String name);
+
+  /**
+   * Add a key/metric and its value to track
+   * @param name Name of the key/metric
+   * @param value value associated with the key
+   * @throws IOException
+   */
+  public abstract void put(String name, Object value) throws IOException;
+
+  /**
+   * @param name Name of the key/metric to look up
+   * @return the value associated with the key
+   * @throws IOException
+   */
+  public abstract Object get(String name) throws IOException;
+
+}
\ No newline at end of file
Index: common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java	(revision 0)
+++ common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java	(revision 0)
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.metrics;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.management.Attribute;
+import javax.management.AttributeList;
+import javax.management.AttributeNotFoundException;
+import javax.management.InvalidAttributeValueException;
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanConstructorInfo;
+import javax.management.MBeanException;
+import javax.management.MBeanInfo;
+import javax.management.MBeanNotificationInfo;
+import javax.management.MBeanOperationInfo;
+import javax.management.ReflectionException;
+
+
+public class MetricsMBeanImpl implements MetricsMBean {
+
+  Map<String, Object> metricsMap = new HashMap<String, Object>();
+
+  MBeanAttributeInfo[] attributeInfos;
+  boolean dirtyAttributeInfoCache = true;
+
+  MBeanConstructorInfo[] ctors = null;
+  MBeanOperationInfo[] ops = null;
+  MBeanNotificationInfo[] notifs = null;
+
+  @Override
+  public Object getAttribute(String arg0) throws AttributeNotFoundException,
+      MBeanException, ReflectionException {
+    if (metricsMap.containsKey(arg0)) {
+      return metricsMap.get(arg0);
+    } else {
+      throw new AttributeNotFoundException("Key [" + arg0 + "] not found/tracked");
+    }
+  }
+
+  @Override
+  public AttributeList getAttributes(String[] arg0) {
+    AttributeList results = new AttributeList();
+    for (String key : arg0) {
+      results.add(new Attribute(key, metricsMap.get(key)));
+    }
+    return results;
+  }
+
+  @Override
+  public MBeanInfo getMBeanInfo() {
+    if (dirtyAttributeInfoCache) {
+      attributeInfos = new MBeanAttributeInfo[metricsMap.size()];
+      int i = 0;
+      for (String key : metricsMap.keySet()) {
+        attributeInfos[i] = new MBeanAttributeInfo(key,
+            metricsMap.get(key).getClass().getName(), key, true, false, false);
+        i++;
+      }
+      dirtyAttributeInfoCache = false;
+    }
+    return new MBeanInfo(this.getClass().getName(), "metrics information",
+        attributeInfos, ctors, ops, notifs);
+  }
+
+  @Override
+  public Object invoke(String arg0, Object[] arg1, String[] arg2)
+      throws MBeanException, ReflectionException {
+    // no invocations.
+    return null;
+  }
+
+  @Override
+  public void setAttribute(Attribute attr) throws AttributeNotFoundException,
+      InvalidAttributeValueException, MBeanException, ReflectionException {
+    try {
+      put(attr.getName(), attr.getValue());
+    } catch (Exception e) {
+      throw new MBeanException(e);
+    }
+  }
+
+  @Override
+  public AttributeList setAttributes(AttributeList arg0) {
+    AttributeList attributesSet = new AttributeList();
+    for (Attribute attr : arg0.asList()) {
+      try {
+        setAttribute(attr);
+        attributesSet.add(attr);
+      } catch (AttributeNotFoundException e) {
+        // ignore the exception - we simply don't add this attribute
+        // back in to the resultant set.
+      } catch (InvalidAttributeValueException e) {
+        // ditto
+      } catch (MBeanException e) {
+        // likewise
+      } catch (ReflectionException e) {
+        // and again, one last time.
+ } + } + return attributesSet; + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.common.metrics.MetricsMBean#hasKey(java.lang.String) + */ + public boolean hasKey(String name) { + return metricsMap.containsKey(name); + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.common.metrics.MetricsMBean#put(java.lang.String, java.lang.Object) + */ + public void put(String name, Object value) throws IOException { + if (!metricsMap.containsKey(name)){ + dirtyAttributeInfoCache = true; + } + metricsMap.put(name,value); + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.common.metrics.MetricsMBean#get(java.lang.String) + */ + public Object get(String name) throws IOException { + try { + return getAttribute(name); + } catch (AttributeNotFoundException e) { + throw new IOException(e); + } catch (MBeanException e) { + throw new IOException(e); + } catch (ReflectionException e) { + throw new IOException(e); + } + } + +}