diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 8e99cf46a4..b5194dd09e 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -9882,7 +9882,7 @@ public static void main(String[] args) throws Throwable { } } - private static AtomicInteger openConnections; + private static Counter openConnections; /** * Start Metastore based on a passed {@link HadoopThriftAuthBridge} @@ -10027,7 +10027,7 @@ public static void startMetaStore(int port, HadoopThriftAuthBridge bridge, // Metrics will have already been initialized if we're using them since HMSHandler // initializes them. - openConnections = Metrics.getOrCreateGauge(MetricsConstants.OPEN_CONNECTIONS); + openConnections = Metrics.getOpenConnectionsCounter(); TThreadPoolServer.Args args = new TThreadPoolServer.Args(serverSocket) .processor(processor) @@ -10045,13 +10045,13 @@ public void preServe() { @Override public ServerContext createContext(TProtocol tProtocol, TProtocol tProtocol1) { - openConnections.incrementAndGet(); + openConnections.inc(); return null; } @Override public void deleteContext(ServerContext serverContext, TProtocol tProtocol, TProtocol tProtocol1) { - openConnections.decrementAndGet(); + openConnections.dec(); // If the IMetaStoreClient#close was called, HMSHandler#shutdown would have already // cleaned up thread local RawStore. Otherwise, do it now. 
cleanupRawStore(); diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java index 53ee84a325..1522cfe5cd 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java @@ -29,7 +29,9 @@ import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; import com.codahale.metrics.JmxReporter; +import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.MetricSet; import com.codahale.metrics.Reporter; import com.codahale.metrics.ScheduledReporter; import com.codahale.metrics.Slf4jReporter; @@ -153,6 +155,10 @@ public Integer getValue() { } } + public static Counter getOpenConnectionsCounter() { + return getOrCreateCounter(MetricsConstants.OPEN_CONNECTIONS); + } + @VisibleForTesting static List getReporters() { return self.reporters; @@ -161,11 +167,13 @@ public Integer getValue() { private Metrics(Configuration conf) { registry = new MetricRegistry(); - registry.registerAll(new GarbageCollectorMetricSet()); - registry.registerAll(new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer())); - registry.registerAll(new MemoryUsageGaugeSet()); - registry.registerAll(new ThreadStatesGaugeSet()); - registry.registerAll(new ClassLoadingGaugeSet()); + // this is the same logic as implemented in CodahaleMetrics in hive-common package, + // but standalone-metastore project doesn't depend on that + registerAll("gc", new GarbageCollectorMetricSet()); + registerAll("buffers", new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer())); + registerAll("memory", new MemoryUsageGaugeSet()); + registerAll("threads", new ThreadStatesGaugeSet()); + registerAll("classLoading", new 
ClassLoadingGaugeSet()); /* * This is little complicated. First we look for our own config values on this. If those @@ -255,4 +263,14 @@ private Metrics(Configuration conf) { // Create map for tracking gauges gaugeAtomics = new HashMap<>(); } + + private void registerAll(String prefix, MetricSet metricSet) { + for (Map.Entry<String, Metric> entry : metricSet.getMetrics().entrySet()) { + if (entry.getValue() instanceof MetricSet) { + registerAll(prefix + "." + entry.getKey(), (MetricSet) entry.getValue()); + } else { + registry.register(prefix + "." + entry.getKey(), entry.getValue()); + } + } + } } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java index 40a51751e3..29d051a632 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hive.metastore.metrics; import java.io.File; +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; import java.nio.file.Files; import java.nio.file.Paths; import java.util.List; @@ -53,6 +55,7 @@ public void slf4jReporter() throws Exception { MetastoreConf.ConfVars.METRICS_SLF4J_LOG_FREQUENCY_MINS, REPORT_INTERVAL, TimeUnit.SECONDS); // 1. Verify the default level (INFO) + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_SLF4J_LOG_LEVEL, "INFO"); validateSlf4jReporter(conf, Level.INFO); // 2.
Verify an overridden level (DEBUG) @@ -60,8 +63,8 @@ public void slf4jReporter() throws Exception { validateSlf4jReporter(conf, Level.DEBUG); } - private void validateSlf4jReporter(Configuration conf, Level level) throws InterruptedException { - Metrics.initialize(conf); + private void validateSlf4jReporter(Configuration conf, Level level) throws Exception { + initializeMetrics(conf); Counter counter = Metrics.getOrCreateCounter("my-counter"); counter.inc(5); // Make sure it has a chance to dump it. @@ -87,7 +90,7 @@ public void jsonReporter() throws Exception { MetastoreConf.setTimeVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_INTERVAL, REPORT_INTERVAL, TimeUnit.SECONDS); - Metrics.initialize(conf); + initializeMetrics(conf); Counter counter = Metrics.getOrCreateCounter("my-counter"); for (int i = 0; i < 5; i++) { @@ -100,6 +103,35 @@ public void jsonReporter() throws Exception { } } + @Test + public void testJsonStructure() throws Exception { + File jsonReportFile = File.createTempFile("TestMetrics", ".json"); + String jsonFile = jsonReportFile.getAbsolutePath(); + + Configuration conf = MetastoreConf.newMetastoreConf(); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_REPORTERS, "json"); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile); + MetastoreConf.setTimeVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_INTERVAL, + REPORT_INTERVAL, TimeUnit.SECONDS); + + initializeMetrics(conf); + + Counter openConnections = Metrics.getOpenConnectionsCounter(); + openConnections.inc(); + + Thread.sleep(REPORT_INTERVAL * 1000 + REPORT_INTERVAL * 1000 / 2); + + String json = new String(MetricsTestUtils.getFileData(jsonFile, 200, 10)); + + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "buffers.direct.capacity"); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "memory.heap.used"); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "threads.count"); + 
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "classLoading.loaded"); + + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, + MetricsConstants.OPEN_CONNECTIONS, 1); + } + @Test public void allReporters() throws Exception { String jsonFile = System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + @@ -108,7 +140,7 @@ public void allReporters() throws Exception { MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_REPORTERS, "json,jmx,console,hadoop"); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile); - Metrics.initialize(conf); + initializeMetrics(conf); Assert.assertEquals(4, Metrics.getReporters().size()); } @@ -125,7 +157,7 @@ public void allReportersHiveConfig() throws Exception { "org.apache.hadoop.hive.common.metrics.metrics2.Metrics2Reporter"); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile); - Metrics.initialize(conf); + initializeMetrics(conf); Assert.assertEquals(4, Metrics.getReporters().size()); } @@ -139,7 +171,7 @@ public void allReportersOldHiveConfig() throws Exception { "JSON_FILE,JMX,CONSOLE,HADOOP2"); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile); - Metrics.initialize(conf); + initializeMetrics(conf); Assert.assertEquals(4, Metrics.getReporters().size()); } @@ -150,7 +182,7 @@ public void defaults() throws Exception { "TestMetricsOutput.json"; Configuration conf = MetastoreConf.newMetastoreConf(); MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_JSON_FILE_LOCATION, jsonFile); - Metrics.initialize(conf); + initializeMetrics(conf); Assert.assertEquals(2, Metrics.getReporters().size()); } @@ -173,9 +205,19 @@ public void defaults() throws Exception { } static void verifyMetricsJson(String json, MetricsCategory category, String metricsName, - Object expectedValue) throws Exception { + Object expectedValue) throws Exception { JsonNode jsonNode = getJsonNode(json, 
category, metricsName); - Assert.assertEquals(expectedValue.toString(), jsonNode.asText()); + Assert.assertTrue(String.format("%s.%s.%s should not be empty", category.category, + metricsName, category.metricsHandle), !jsonNode.asText().isEmpty()); + + if (expectedValue != null) { + Assert.assertEquals(expectedValue.toString(), jsonNode.asText()); + } + } + + static void verifyMetricsJson(String json, MetricsCategory category, String metricsName) + throws Exception { + verifyMetricsJson(json, category, metricsName, null); } static JsonNode getJsonNode(String json, MetricsCategory category, String metricsName) throws Exception { @@ -195,4 +237,14 @@ static JsonNode getJsonNode(String json, MetricsCategory category, String metric return Files.readAllBytes(Paths.get(path)); } } + + private void initializeMetrics(Configuration conf) throws Exception { + Field field = Metrics.class.getDeclaredField("self"); + field.setAccessible(true); + + Constructor<Metrics> cons = Metrics.class.getDeclaredConstructor(Configuration.class); + cons.setAccessible(true); + + field.set(null, cons.newInstance(conf)); + } }