Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1054860) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -23,6 +23,7 @@ import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; import static org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName; +import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; @@ -35,6 +36,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.common.metrics.Metrics; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException; @@ -172,6 +174,15 @@ updateConnectionURL(hiveConf, null); createDefaultDB(); + + if (hiveConf.getBoolean("hive.metastore.metrics.enabled", false)){ + try { + Metrics.init(); + } catch (Exception e) { + // log exception, but ignore inability to start + LOG.error("Exception while instantiating metrics system: " +e); + } + } return true; } @@ -405,19 +416,42 @@ } } - private void logStartFunction(String m) { + private void logInfo(String m) { LOG.info(threadLocalId.get().toString() + ": " + m); } - private void logStartTableFunction(String f, String db, String tbl) { - LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db + " tbl=" + tbl); + public String startFunction(String function, String extraLogInfo){ + incrementCounter(function); + logInfo(function + extraLogInfo); + try { + Metrics.startScope(function); + } catch (IOException e) { + LOG.debug("Exception when starting metrics scope" + e); + } + return function; } - private void 
logStartPartitionFunction(String f, String db, String tbl, List partVals) { - LOG.info(threadLocalId.get().toString() + ": " + f + " : db=" + db + " tbl=" + tbl - + "[" + join(partVals, ",") + "]"); + public String startFunction(String function){ + return startFunction(function,""); } + public String startTableFunction(String function, String db, String tbl){ + return startFunction(function, " : db=" + db + " tbl=" + tbl); + } + + public String startPartitionFunction(String function, String db, String tbl, List partVals){ + return startFunction(function, " : db=" + db + " tbl=" + tbl + + "[" + join(partVals, ",") + "]" ); + } + + public void endFunction(String function){ + try { + Metrics.endScope(function); + } catch (IOException e) { + LOG.debug("Exception when closing metrics scope" + e); + } + } + @Override public fb_status getStatus() { return fb_status.ALIVE; @@ -425,7 +459,7 @@ @Override public void shutdown() { - logStartFunction("Shutting down the object store..."); + logInfo("Shutting down the object store..."); RawStore ms = threadLocalMS.get(); if (ms != null) { ms.shutdown(); @@ -457,8 +491,7 @@ public void create_database(final Database db) throws AlreadyExistsException, InvalidObjectException, MetaException { - incrementCounter("create_database"); - logStartFunction("create_database: " + startFunction("create_database",": " + db.getName() + " " + db.getLocationUri() + " " + db.getDescription()); @@ -486,14 +519,14 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("create_database"); } } public Database get_database(final String name) throws NoSuchObjectException, MetaException { - incrementCounter("get_database"); - logStartFunction("get_database: " + name); - + startFunction("get_database",": " + name); Database db = null; try { db = executeWithRetry(new Command() { @@ -509,6 +542,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } 
finally { + endFunction("get_database"); } return db; } @@ -539,9 +574,10 @@ public void drop_database(final String dbName, final boolean deleteData) throws NoSuchObjectException, InvalidOperationException, MetaException { - incrementCounter("drop_database"); - logStartFunction("drop_database: " + dbName); + + startFunction("drop_database",": " + dbName); if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName)) { + endFunction("drop_database"); throw new MetaException("Can not drop default database"); } @@ -562,12 +598,13 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_database"); } } public List get_databases(final String pattern) throws MetaException { - incrementCounter("get_databases"); - logStartFunction("get_databases: " + pattern); + startFunction("get_databases",": " + pattern); List ret = null; try { @@ -582,13 +619,14 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_databases"); } return ret; } public List get_all_databases() throws MetaException { - incrementCounter("get_all_databases"); - logStartFunction("get_all_databases"); + startFunction("get_all_databases"); List ret = null; try { @@ -603,6 +641,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_all_databases"); } return ret; } @@ -630,8 +670,7 @@ public boolean create_type(final Type type) throws AlreadyExistsException, MetaException, InvalidObjectException { - incrementCounter("create_type"); - logStartFunction("create_type: " + type.getName()); + startFunction("create_type",": " + type.getName()); Boolean ret = null; try { ret = executeWithRetry(new Command() { @@ -650,14 +689,15 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("create_type"); } return ret.booleanValue(); } public Type get_type(final 
String name) throws MetaException, NoSuchObjectException { - incrementCounter("get_type"); - logStartFunction("get_type: " + name); + startFunction("get_type",": " + name); Type ret; try { @@ -678,6 +718,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_type"); } return ret; } @@ -709,8 +751,7 @@ public boolean drop_type(final String name) throws MetaException { - incrementCounter("drop_type"); - logStartFunction("drop_type: " + name); + startFunction("drop_type",": " + name); Boolean ret = null; try { @@ -726,14 +767,16 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_type"); } return ret; } public Map get_type_all(String name) throws MetaException { - incrementCounter("get_type_all"); // TODO Auto-generated method stub - logStartFunction("get_type_all: " + name); + startFunction("get_type_all",": " + name); + endFunction("get_type_all"); throw new MetaException("Not yet implemented"); } @@ -806,8 +849,7 @@ public void create_table(final Table tbl) throws AlreadyExistsException, MetaException, InvalidObjectException { - incrementCounter("create_table"); - logStartFunction("create_table: db=" + tbl.getDbName() + " tbl=" + startFunction("create_table",": db=" + tbl.getDbName() + " tbl=" + tbl.getTableName()); try { executeWithRetry(new Command() { @@ -826,6 +868,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("create_table"); } } @@ -896,8 +940,7 @@ public void drop_table(final String dbname, final String name, final boolean deleteData) throws NoSuchObjectException, MetaException { - incrementCounter("drop_table"); - logStartTableFunction("drop_table", dbname, name); + startTableFunction("drop_table", dbname, name); try { executeWithRetry(new Command() { @@ -914,6 +957,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); 
throw (RuntimeException)e; + } finally { + endFunction("drop_table"); } } @@ -936,8 +981,7 @@ public Table get_table(final String dbname, final String name) throws MetaException, NoSuchObjectException { Table t = null; - incrementCounter("get_table"); - logStartTableFunction("get_table", dbname, name); + startTableFunction("get_table", dbname, name); try { t = executeWithRetry(new Command() { @Override @@ -957,14 +1001,15 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_table"); } return t; } public boolean set_table_parameters(String dbname, String name, Map params) throws NoSuchObjectException, MetaException { - incrementCounter("set_table_parameters"); - logStartTableFunction("set_table_parameters", dbname, name); + endFunction(startTableFunction("set_table_parameters", dbname, name)); // TODO Auto-generated method stub return false; } @@ -1037,8 +1082,7 @@ public Partition append_partition(final String dbName, final String tableName, final List part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException { - incrementCounter("append_partition"); - logStartPartitionFunction("append_partition", dbName, tableName, part_vals); + startPartitionFunction("append_partition", dbName, tableName, part_vals); if (LOG.isDebugEnabled()) { for (String part : part_vals) { LOG.debug(part); @@ -1062,6 +1106,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("append_partition"); } return ret; } @@ -1070,7 +1116,8 @@ throws MetaException, InvalidObjectException, AlreadyExistsException { String db = parts.get(0).getDbName(); String tbl = parts.get(0).getTableName(); - logStartTableFunction("add_partitions", db, tbl); + logInfo("add_partitions : db=" + db + " tbl=" + tbl); + boolean success = false; try { ms.openTransaction(); @@ -1089,7 +1136,7 @@ public int add_partitions(final List parts) throws MetaException, 
InvalidObjectException, AlreadyExistsException { - incrementCounter("add_partition"); + startFunction("add_partition"); if (parts.size() == 0) { return 0; } @@ -1112,6 +1159,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("add_partition"); } return ret; } @@ -1184,8 +1233,7 @@ public Partition add_partition(final Partition part) throws InvalidObjectException, AlreadyExistsException, MetaException { - incrementCounter("add_partition"); - logStartTableFunction("add_partition", part.getDbName(), part.getTableName()); + startTableFunction("add_partition", part.getDbName(), part.getTableName()); Partition ret = null; try { @@ -1204,6 +1252,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("add_partition"); } return ret; @@ -1265,8 +1315,7 @@ public boolean drop_partition(final String db_name, final String tbl_name, final List part_vals, final boolean deleteData) throws NoSuchObjectException, MetaException, TException { - incrementCounter("drop_partition"); - logStartPartitionFunction("drop_partition", db_name, tbl_name, part_vals); + startPartitionFunction("drop_partition", db_name, tbl_name, part_vals); LOG.info("Partition values:" + part_vals); Boolean ret = null; @@ -1287,6 +1336,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_partition"); } return ret.booleanValue(); @@ -1294,8 +1345,7 @@ public Partition get_partition(final String db_name, final String tbl_name, final List part_vals) throws MetaException, NoSuchObjectException { - incrementCounter("get_partition"); - logStartPartitionFunction("get_partition", db_name, tbl_name, part_vals); + startPartitionFunction("get_partition", db_name, tbl_name, part_vals); Partition ret = null; try { @@ -1312,14 +1362,15 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw 
(RuntimeException)e; + } finally { + endFunction("get_partition"); } return ret; } public List get_partitions(final String db_name, final String tbl_name, final short max_parts) throws NoSuchObjectException, MetaException { - incrementCounter("get_partitions"); - logStartTableFunction("get_partitions", db_name, tbl_name); + startTableFunction("get_partitions", db_name, tbl_name); List ret = null; try { @@ -1336,6 +1387,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_partitions"); } return ret; @@ -1343,8 +1396,7 @@ public List get_partition_names(final String db_name, final String tbl_name, final short max_parts) throws MetaException { - incrementCounter("get_partition_names"); - logStartTableFunction("get_partition_names", db_name, tbl_name); + startTableFunction("get_partition_names", db_name, tbl_name); List ret = null; try { @@ -1359,6 +1411,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_partition_names"); } return ret; } @@ -1383,8 +1437,7 @@ public void alter_partition(final String db_name, final String tbl_name, final Partition new_part) throws InvalidOperationException, MetaException, TException { - incrementCounter("alter_partition"); - logStartTableFunction("alter_partition", db_name, tbl_name); + startTableFunction("alter_partition", db_name, tbl_name); LOG.info("Partition values:" + new_part.getValues()); try { @@ -1404,21 +1457,22 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("alter_partition"); } return; } public boolean create_index(Index index_def) throws IndexAlreadyExistsException, MetaException { - incrementCounter("create_index"); + endFunction(startFunction("create_index")); // TODO Auto-generated method stub throw new MetaException("Not yet implemented"); } public void alter_index(final String dbname, final 
String base_table_name, final String index_name, final Index newIndex) throws InvalidOperationException, MetaException { - incrementCounter("alter_index"); - logStartFunction("alter_index: db=" + dbname + " base_tbl=" + base_table_name + startFunction("alter_index", ": db=" + dbname + " base_tbl=" + base_table_name + " idx=" + index_name + " newidx=" + newIndex.getIndexName()); newIndex.putToParameters(Constants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); @@ -1438,20 +1492,20 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("alter_index"); } return; } public String getVersion() throws TException { - incrementCounter("getVersion"); - logStartFunction("getVersion"); + endFunction(startFunction("getVersion")); return "3.0"; } public void alter_table(final String dbname, final String name, final Table newTable) throws InvalidOperationException, MetaException { - incrementCounter("alter_table"); - logStartFunction("alter_table: db=" + dbname + " tbl=" + name + startFunction("alter_table",": db=" + dbname + " tbl=" + name + " newtbl=" + newTable.getTableName()); // Update the time if it hasn't been specified. 
@@ -1461,6 +1515,7 @@ .currentTimeMillis() / 1000)); } + try { executeWithRetry(new Command() { @Override @@ -1476,13 +1531,14 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("alter_table"); } } public List get_tables(final String dbname, final String pattern) throws MetaException { - incrementCounter("get_tables"); - logStartFunction("get_tables: db=" + dbname + " pat=" + pattern); + startFunction("get_tables",": db=" + dbname + " pat=" + pattern); List ret; try { @@ -1497,13 +1553,14 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_tables"); } return ret; } public List get_all_tables(final String dbname) throws MetaException { - incrementCounter("get_all_tables"); - logStartFunction("get_all_tables: db=" + dbname); + startFunction("get_all_tables",": db=" + dbname); List ret; try { @@ -1518,35 +1575,40 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_all_tables"); } return ret; } public List get_fields(String db, String tableName) throws MetaException, UnknownTableException, UnknownDBException { - incrementCounter("get_fields"); - logStartFunction("get_fields: db=" + db + "tbl=" + tableName); + startFunction("get_fields",": db=" + db + "tbl=" + tableName); String[] names = tableName.split("\\."); String base_table_name = names[0]; Table tbl; try { - tbl = get_table(db, base_table_name); - } catch (NoSuchObjectException e) { - throw new UnknownTableException(e.getMessage()); - } - boolean getColsFromSerDe = SerDeUtils.shouldGetColsFromSerDe( - tbl.getSd().getSerdeInfo().getSerializationLib()); - if (!getColsFromSerDe) { - return tbl.getSd().getCols(); - } else { try { - Deserializer s = MetaStoreUtils.getDeserializer(hiveConf, tbl); - return MetaStoreUtils.getFieldsFromDeserializer(tableName, s); - } catch (SerDeException e) { - 
StringUtils.stringifyException(e); - throw new MetaException(e.getMessage()); + tbl = get_table(db, base_table_name); + } catch (NoSuchObjectException e) { + throw new UnknownTableException(e.getMessage()); } + boolean getColsFromSerDe = SerDeUtils.shouldGetColsFromSerDe( + tbl.getSd().getSerdeInfo().getSerializationLib()); + if (!getColsFromSerDe) { + return tbl.getSd().getCols(); + } else { + try { + Deserializer s = MetaStoreUtils.getDeserializer(hiveConf, tbl); + return MetaStoreUtils.getFieldsFromDeserializer(tableName, s); + } catch (SerDeException e) { + StringUtils.stringifyException(e); + throw new MetaException(e.getMessage()); + } + } + } finally { + endFunction("get_fields"); } } @@ -1565,29 +1627,32 @@ */ public List get_schema(String db, String tableName) throws MetaException, UnknownTableException, UnknownDBException { - incrementCounter("get_schema"); - logStartFunction("get_schema: db=" + db + "tbl=" + tableName); - String[] names = tableName.split("\\."); - String base_table_name = names[0]; - - Table tbl; + startFunction("get_schema",": db=" + db + "tbl=" + tableName); try { - tbl = get_table(db, base_table_name); - } catch (NoSuchObjectException e) { - throw new UnknownTableException(e.getMessage()); - } - List fieldSchemas = get_fields(db, base_table_name); + String[] names = tableName.split("\\."); + String base_table_name = names[0]; - if (tbl == null || fieldSchemas == null) { - throw new UnknownTableException(tableName + " doesn't exist"); - } + Table tbl; + try { + tbl = get_table(db, base_table_name); + } catch (NoSuchObjectException e) { + throw new UnknownTableException(e.getMessage()); + } + List fieldSchemas = get_fields(db, base_table_name); - if (tbl.getPartitionKeys() != null) { - // Combine the column field schemas and the partition keys to create the - // whole schema - fieldSchemas.addAll(tbl.getPartitionKeys()); + if (tbl == null || fieldSchemas == null) { + throw new UnknownTableException(tableName + " doesn't exist"); + } + + 
if (tbl.getPartitionKeys() != null) { + // Combine the column field schemas and the partition keys to create the + // whole schema + fieldSchemas.addAll(tbl.getPartitionKeys()); + } + return fieldSchemas; + } finally { + endFunction("get_schema"); } - return fieldSchemas; } public String getCpuProfile(int profileDurationInSec) throws TException { @@ -1602,28 +1667,31 @@ */ public String get_config_value(String name, String defaultValue) throws TException, ConfigValSecurityException { - incrementCounter("get_config_value"); - logStartFunction("get_config_value: name=" + name + " defaultValue=" + startFunction("get_config_value",": name=" + name + " defaultValue=" + defaultValue); - if (name == null) { - return defaultValue; - } - // Allow only keys that start with hive.*, hdfs.*, mapred.* for security - // i.e. don't allow access to db password - if (!Pattern.matches("(hive|hdfs|mapred).*", name)) { - throw new ConfigValSecurityException("For security reasons, the " - + "config key " + name + " cannot be accessed"); - } + try { + if (name == null) { + return defaultValue; + } + // Allow only keys that start with hive.*, hdfs.*, mapred.* for security + // i.e. 
don't allow access to db password + if (!Pattern.matches("(hive|hdfs|mapred).*", name)) { + throw new ConfigValSecurityException("For security reasons, the " + + "config key " + name + " cannot be accessed"); + } - String toReturn = defaultValue; - try { - toReturn = hiveConf.get(name, defaultValue); - } catch (RuntimeException e) { - LOG.error(threadLocalId.get().toString() + ": " - + "RuntimeException thrown in get_config_value - msg: " - + e.getMessage() + " cause: " + e.getCause()); + String toReturn = defaultValue; + try { + toReturn = hiveConf.get(name, defaultValue); + } catch (RuntimeException e) { + LOG.error(threadLocalId.get().toString() + ": " + + "RuntimeException thrown in get_config_value - msg: " + + e.getMessage() + " cause: " + e.getCause()); + } + return toReturn; + } finally { + endFunction("get_config_value"); } - return toReturn; } private List getPartValsFromName(RawStore ms, String dbName, String tblName, @@ -1672,8 +1740,7 @@ public Partition get_partition_by_name(final String db_name,final String tbl_name, final String part_name) throws MetaException, NoSuchObjectException, TException { - incrementCounter("get_partition_by_name"); - logStartFunction("get_partition_by_name: db=" + db_name + " tbl=" + startFunction("get_partition_by_name",": db=" + db_name + " tbl=" + tbl_name + " part=" + part_name); Partition ret = null; @@ -1694,6 +1761,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_partition_by_name"); } return ret; } @@ -1701,8 +1770,7 @@ public Partition append_partition_by_name(final String db_name, final String tbl_name, final String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { - incrementCounter("append_partition_by_name"); - logStartFunction("append_partition_by_name: db=" + db_name + " tbl=" + startFunction("append_partition_by_name",": db=" + db_name + " tbl=" + tbl_name + " part=" + part_name); 
Partition ret = null; @@ -1725,6 +1793,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("append_partition_by_name"); } return ret; } @@ -1748,8 +1818,7 @@ public boolean drop_partition_by_name(final String db_name, final String tbl_name, final String part_name, final boolean deleteData) throws NoSuchObjectException, MetaException, TException { - incrementCounter("drop_partition_by_name"); - logStartFunction("drop_partition_by_name: db=" + db_name + " tbl=" + startFunction("drop_partition_by_name",": db=" + db_name + " tbl=" + tbl_name + " part=" + part_name); Boolean ret = null; @@ -1770,6 +1839,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_partition_by_name"); } return ret.booleanValue(); @@ -1779,61 +1850,68 @@ public List get_partitions_ps(final String db_name, final String tbl_name, final List part_vals, final short max_parts) throws MetaException, TException { - incrementCounter("get_partitions_ps"); - logStartPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals); - List parts = null; - List matchingParts = new ArrayList(); + startPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals); + try { + List parts = null; + List matchingParts = new ArrayList(); - // This gets all the partitions and then filters based on the specified - // criteria. An alternative approach would be to get all the partition - // names, do the filtering on the names, and get the partition for each - // of the names. that match. + // This gets all the partitions and then filters based on the specified + // criteria. An alternative approach would be to get all the partition + // names, do the filtering on the names, and get the partition for each + // of the names. that match. 
- try { - parts = get_partitions(db_name, tbl_name, (short) -1); - } catch (NoSuchObjectException e) { - throw new MetaException(e.getMessage()); - } + try { + parts = get_partitions(db_name, tbl_name, (short) -1); + } catch (NoSuchObjectException e) { + throw new MetaException(e.getMessage()); + } - for (Partition p : parts) { - if (MetaStoreUtils.pvalMatches(part_vals, p.getValues())) { - matchingParts.add(p); + for (Partition p : parts) { + if (MetaStoreUtils.pvalMatches(part_vals, p.getValues())) { + matchingParts.add(p); + } } + + return matchingParts; } - - return matchingParts; + finally { + endFunction("get_partitions_ps"); + } } @Override public List get_partition_names_ps(final String db_name, final String tbl_name, final List part_vals, final short max_parts) throws MetaException, TException { - incrementCounter("get_partition_names_ps"); - logStartPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals); - Table t; + startPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals); try { - t = get_table(db_name, tbl_name); - } catch (NoSuchObjectException e) { - throw new MetaException(e.getMessage()); - } + Table t; + try { + t = get_table(db_name, tbl_name); + } catch (NoSuchObjectException e) { + throw new MetaException(e.getMessage()); + } - List partNames = get_partition_names(db_name, tbl_name, max_parts); - List filteredPartNames = new ArrayList(); + List partNames = get_partition_names(db_name, tbl_name, max_parts); + List filteredPartNames = new ArrayList(); - for(String name : partNames) { - LinkedHashMap spec = Warehouse.makeSpecFromName(name); - List vals = new ArrayList(); - // Since we are iterating through a LinkedHashMap, iteration should - // return the partition values in the correct order for comparison. 
- for (String val : spec.values()) { - vals.add(val); + for(String name : partNames) { + LinkedHashMap spec = Warehouse.makeSpecFromName(name); + List vals = new ArrayList(); + // Since we are iterating through a LinkedHashMap, iteration should + // return the partition values in the correct order for comparison. + for (String val : spec.values()) { + vals.add(val); + } + if (MetaStoreUtils.pvalMatches(part_vals, vals)) { + filteredPartNames.add(name); + } } - if (MetaStoreUtils.pvalMatches(part_vals, vals)) { - filteredPartNames.add(name); - } + + return filteredPartNames; + } finally { + endFunction("get_partitions_names_ps"); } - - return filteredPartNames; } @Override @@ -1860,8 +1938,7 @@ @Override public Index add_index(final Index newIndex, final Table indexTable) throws InvalidObjectException, AlreadyExistsException, MetaException, TException { - incrementCounter("add_partition"); - logStartFunction("add_index: db=" + newIndex.getDbName() + " tbl=" + startFunction("add_index",": db=" + newIndex.getDbName() + " tbl=" + newIndex.getOrigTableName() + " index=" + newIndex.getIndexName()); Index ret = null; try { @@ -1880,6 +1957,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("add_index"); } return ret; } @@ -1945,8 +2024,7 @@ public boolean drop_index_by_name(final String dbName, final String tblName, final String indexName, final boolean deleteData) throws NoSuchObjectException, MetaException, TException { - incrementCounter("drop_index_by_name"); - logStartFunction("drop_index_by_name: db=" + dbName + " tbl=" + startFunction("drop_index_by_name",": db=" + dbName + " tbl=" + tblName + " index=" + indexName); Boolean ret = null; @@ -1967,6 +2045,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_index_by_name"); } return ret.booleanValue(); @@ -2023,8 +2103,7 @@ final String indexName) throws MetaException, 
NoSuchObjectException, TException { - incrementCounter("get_index_by_name"); - logStartFunction("get_index_by_name: db=" + dbName + " tbl=" + startFunction("get_index_by_name",": db=" + dbName + " tbl=" + tblName + " index=" + indexName); Index ret = null; @@ -2045,6 +2124,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("drop_index_by_name"); } return ret; } @@ -2064,8 +2145,7 @@ @Override public List get_index_names(final String dbName, final String tblName, final short maxIndexes) throws MetaException, TException { - incrementCounter("get_index_names"); - logStartTableFunction("get_index_names", dbName, tblName); + startTableFunction("get_index_names", dbName, tblName); List ret = null; try { @@ -2080,6 +2160,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_index_names"); } return ret; } @@ -2088,8 +2170,7 @@ public List get_indexes(final String dbName, final String tblName, final short maxIndexes) throws NoSuchObjectException, MetaException, TException { - incrementCounter("get_indexes"); - logStartTableFunction("get_indexes", dbName, tblName); + startTableFunction("get_indexes", dbName, tblName); List ret = null; try { @@ -2104,6 +2185,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } finally { + endFunction("get_indexes"); } return ret; } @@ -2112,8 +2195,7 @@ public List get_partitions_by_filter(final String dbName, final String tblName, final String filter, final short maxParts) throws MetaException, NoSuchObjectException, TException { - incrementCounter("get_partitions_by_filter"); - logStartTableFunction("get_partitions_by_filter", dbName, tblName); + startTableFunction("get_partitions_by_filter", dbName, tblName); List ret = null; try { @@ -2130,6 +2212,8 @@ } catch (Exception e) { assert(e instanceof RuntimeException); throw (RuntimeException)e; + } 
finally { + endFunction("get_partitions_by_filter"); } return ret; } Index: common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java =================================================================== --- common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java (revision 0) +++ common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java (revision 0) @@ -0,0 +1,157 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.metrics; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.util.HashMap; + +import javax.management.MBeanServer; +import javax.management.ObjectName; + +/** + * Metrics Subsystem - allows exposure of a number of named parameters/counters + * via jmx, intended to be used as a static subsystem + * + * Has a couple of primary ways it can be used: + * (i) Using the set and get methods to set and get named parameters + * (ii) Using the incrementCounter method to increment and set named + * parameters in one shot, rather than having to make a get and then a set. 
+ * (iii) Using the startScope and endScope methods to start and end + * named "scopes" that record the number of times they've been instantiated + * and amount of time(in milliseconds) spent inside the scopes. + */ +public class Metrics { + + /** + * MetricsScope : A class that encapsulates an idea of a metered scope. + * Instantiating a named scope and then closing it exposes two counters: + * (i) a "number of calls" counter ( <name>.n ), and + * (ii) a "total number of milliseconds spent between scope open and close" counter. ( <name>.t) + */ + public class MetricsScope { + + String name = null; + boolean isOpen = false; + Long startTime = null; + String numCounter = null; + String timeCounter = null; + + //disable default ctor - so that it can't be created without a name + @SuppressWarnings("unused") + private MetricsScope(){ } + + /** + * Instantiates a named scope - intended to only be called by Metrics, so locally scoped. + * @param name - name of the variable + * @throws IOException + */ + MetricsScope(String name) throws IOException { + this.name = name; + Metrics.incrementCounter(name+".n"); + isOpen = true; + startTime = System.currentTimeMillis(); + } + + /** + * Closes scope, and records the time taken + * @throws IOException + */ + public void close() throws IOException{ + if (isOpen){ + Long endTime = System.currentTimeMillis(); + Metrics.incrementCounter(name+".t",endTime-startTime); + } + isOpen = false; + } + } + + + static MetricsMBean metrics = new MetricsMBeanImpl(); + + static ThreadLocal> threadLocalScopes = new ThreadLocal>(){ + @Override + protected synchronized HashMap initialValue() { + return new HashMap(); + } + }; + + static boolean initialized = false; + + static Metrics m = new Metrics(); + + public static void init() throws Exception { + if (!initialized){ + MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); + ObjectName oname = new ObjectName("org.apache.hadoop.hive.common.metrics:type=MetricsMBean"); + 
mbs.registerMBean(metrics, oname); + initialized = true; + } + } + + public static void incrementCounter(String name) throws IOException{ + if (!initialized){ return; } + incrementCounter(name,Long.valueOf(1)); + } + + public static void incrementCounter(String name, long increment) throws IOException{ + if (!initialized){ return; } + if (!metrics.hasKey(name)){ + set(name,Long.valueOf(1)); + }else{ + set(name, ((Long)get(name))+increment); + } + } + + public static void set(String name, Object value) throws IOException{ + if (!initialized){ return; } + metrics.put(name,value); + } + + public static Object get(String name) throws IOException{ + if (!initialized){ return null; } + return metrics.get(name); + } + + public static MetricsScope startScope(String name) throws IOException{ + if (!initialized){ return null; } + if (threadLocalScopes.get().containsKey(name)){ + threadLocalScopes.get().get(name).close(); + } + threadLocalScopes.get().put(name, m.new MetricsScope(name)); + return threadLocalScopes.get().get(name); + } + + public static MetricsScope getScope(String name) throws IOException { + if (!initialized) { return null; } + if (threadLocalScopes.get().containsKey(name)){ + return threadLocalScopes.get().get(name); + }else{ + throw new IOException("No metrics scope named "+name); + } + } + + public static void endScope(String name) throws IOException{ + if (!initialized){ return; } + if (threadLocalScopes.get().containsKey(name)){ + threadLocalScopes.get().get(name).close(); + threadLocalScopes.get().remove(name); + } + } + +} Index: common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java =================================================================== --- common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java (revision 0) +++ common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java (revision 0) @@ -0,0 +1,51 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor 
/**
 * MBean definition for metrics tracking from jmx
 */
public interface MetricsMBean extends DynamicMBean {

  /**
   * Check if we're tracking a certain named key/metric
   *
   * @param name name of the key/metric to look up
   * @return true if the key/metric is currently tracked
   */
  boolean hasKey(String name);

  /**
   * Add a key/metric and its value to track
   *
   * @param name Name of the key/metric
   * @param value value associated with the key
   * @throws IOException if the value cannot be stored
   */
  void put(String name, Object value) throws IOException;

  /**
   * Retrieve the value currently tracked for a key/metric.
   *
   * @param name name of the key/metric to read
   * @return the value associated with the key
   * @throws IOException if the key is not tracked or cannot be read
   */
  Object get(String name) throws IOException;

}
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common.metrics; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import javax.management.Attribute; +import javax.management.AttributeList; +import javax.management.AttributeNotFoundException; +import javax.management.InvalidAttributeValueException; +import javax.management.MBeanAttributeInfo; +import javax.management.MBeanConstructorInfo; +import javax.management.MBeanException; +import javax.management.MBeanInfo; +import javax.management.MBeanNotificationInfo; +import javax.management.MBeanOperationInfo; +import javax.management.ReflectionException; + + +public class MetricsMBeanImpl implements MetricsMBean { + + Map metricsMap = new HashMap(); + + MBeanAttributeInfo[] attributeInfos; + boolean dirtyAttributeInfoCache = true; + + MBeanConstructorInfo[] ctors = null; + MBeanOperationInfo[] ops = null; + MBeanNotificationInfo[] notifs = null; + + @Override + public Object getAttribute(String arg0) throws AttributeNotFoundException, + MBeanException, ReflectionException { + if (metricsMap.containsKey(arg0)){ + return metricsMap.get(arg0); + }else{ + throw new AttributeNotFoundException("Key ["+arg0+"] not found/tracked"); + } + } + + @Override + public AttributeList getAttributes(String[] arg0) { + 
AttributeList results = new AttributeList(); + for (String key : arg0){ + results.add(new Attribute(key,metricsMap.get(key))); + } + return results; + } + + @Override + public MBeanInfo getMBeanInfo() { + if (dirtyAttributeInfoCache){ + attributeInfos = new MBeanAttributeInfo[metricsMap.size()]; + int i = 0; + for (String key : metricsMap.keySet()){ + attributeInfos[i] = new MBeanAttributeInfo(key,metricsMap.get(key).getClass().getName(),key,true,false,false); + i++; + } + dirtyAttributeInfoCache = false; + } + return new MBeanInfo(this.getClass().getName(),"metrics information",attributeInfos,ctors,ops,notifs); + } + + @Override + public Object invoke(String arg0, Object[] arg1, String[] arg2) + throws MBeanException, ReflectionException { + // no invocations. + return null; + } + + @Override + public void setAttribute(Attribute attr) throws AttributeNotFoundException, + InvalidAttributeValueException, MBeanException, ReflectionException { + try { + put(attr.getName(),attr.getValue()); + } catch (Exception e) { + throw new MBeanException(e); + } + } + + @Override + public AttributeList setAttributes(AttributeList arg0) { + AttributeList attributesSet = new AttributeList(); + for (Attribute attr : arg0.asList()){ + try { + setAttribute(attr); + attributesSet.add(attr); + } catch (AttributeNotFoundException e) { + // ignore exception - we simply don't add this attribute back in to the resultant set. + } catch (InvalidAttributeValueException e) { + // ditto + } catch (MBeanException e) { + // likewise + } catch (ReflectionException e) { + // and again, one last time. 
+ } + } + return attributesSet; + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.common.metrics.MetricsMBean#hasKey(java.lang.String) + */ + public boolean hasKey(String name) { + return metricsMap.containsKey(name); + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.common.metrics.MetricsMBean#put(java.lang.String, java.lang.Object) + */ + public void put(String name, Object value) throws IOException { + if (!metricsMap.containsKey(name)){ + dirtyAttributeInfoCache = true; + } + metricsMap.put(name,value); + } + + /* (non-Javadoc) + * @see org.apache.hadoop.hive.common.metrics.MetricsMBean#get(java.lang.String) + */ + public Object get(String name) throws IOException { + try { + return getAttribute(name); + } catch (AttributeNotFoundException e) { + throw new IOException(e); + } catch (MBeanException e) { + throw new IOException(e); + } catch (ReflectionException e) { + throw new IOException(e); + } + } + +}