diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java index e8abf6cf06afc9fa590af3a447eacc67735a69e6..c46d178f5b750f4c2245f704fbaa2ba53ef279cd 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java @@ -44,9 +44,12 @@ import com.google.common.cache.LoadingCache; import com.google.common.collect.Lists; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsScope; import org.apache.hadoop.hive.common.metrics.common.MetricsVariable; @@ -74,7 +77,7 @@ import java.util.concurrent.locks.ReentrantLock; /** - * Codahale-backed Metrics implementation. + * Codahale-backed Metrics implementation. Supports multiple reporters specified via the conf variable HIVE_CODAHALE_METRICS_REPORTER_CLASSES. */ public class CodahaleMetrics implements org.apache.hadoop.hive.common.metrics.common.Metrics { @@ -94,8 +97,7 @@ private HiveConf conf; private final Set reporters = new HashSet(); - private final ThreadLocal> threadLocalScopes - = new ThreadLocal>() { + private final ThreadLocal> threadLocalScopes = new ThreadLocal>() { @Override protected HashMap initialValue() { return new HashMap(); @@ -151,36 +153,30 @@ public void close() { public CodahaleMetrics(HiveConf conf) { this.conf = conf; //Codahale artifacts are lazily-created. 
- timers = CacheBuilder.newBuilder().build( - new CacheLoader() { - @Override - public com.codahale.metrics.Timer load(String key) { - Timer timer = new Timer(new ExponentiallyDecayingReservoir()); - metricRegistry.register(key, timer); - return timer; - } + timers = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public com.codahale.metrics.Timer load(String key) { + Timer timer = new Timer(new ExponentiallyDecayingReservoir()); + metricRegistry.register(key, timer); + return timer; + } + }); + counters = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public Counter load(String key) { + Counter counter = new Counter(); + metricRegistry.register(key, counter); + return counter; } - ); - counters = CacheBuilder.newBuilder().build( - new CacheLoader() { - @Override - public Counter load(String key) { - Counter counter = new Counter(); - metricRegistry.register(key, counter); - return counter; - } + }); + meters = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public Meter load(String key) { + Meter meter = new Meter(); + metricRegistry.register(key, meter); + return meter; } - ); - meters = CacheBuilder.newBuilder().build( - new CacheLoader() { - @Override - public Meter load(String key) { - Meter meter = new Meter(); - metricRegistry.register(key, meter); - return meter; - } - } - ); + }); gauges = new ConcurrentHashMap(); //register JVM metrics @@ -190,25 +186,10 @@ public Meter load(String key) { registerAll("threads", new ThreadStatesGaugeSet()); registerAll("classLoading", new ClassLoadingGaugeSet()); - //Metrics reporter - Set finalReporterList = new HashSet(); - List metricsReporterNames = Lists.newArrayList( - Splitter.on(",").trimResults().omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER))); - - if(metricsReporterNames != null) { - for (String metricsReportingName : metricsReporterNames) { - try { - MetricsReporting reporter = 
MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase()); - finalReporterList.add(reporter); - } catch (IllegalArgumentException e) { - LOGGER.warn("Metrics reporter skipped due to invalid configured reporter: " + metricsReportingName); - } - } - } - initReporting(finalReporterList); + // initialize reporters + initReporting(); } - @Override public void close() throws Exception { if (reporters != null) { @@ -269,7 +250,7 @@ public Long incrementCounter(String name, long increment) { countersLock.lock(); counters.get(key).inc(increment); return counters.get(key).getCount(); - } catch(ExecutionException ee) { + } catch (ExecutionException ee) { throw new IllegalStateException("Error retrieving counter from the metric registry ", ee); } finally { countersLock.unlock(); @@ -288,7 +269,7 @@ public Long decrementCounter(String name, long decrement) { countersLock.lock(); counters.get(key).dec(decrement); return counters.get(key).getCount(); - } catch(ExecutionException ee) { + } catch (ExecutionException ee) { throw new IllegalStateException("Error retrieving counter from the metric registry ", ee); } finally { countersLock.unlock(); @@ -307,8 +288,7 @@ public Object getValue() { } @Override - public void addRatio(String name, MetricsVariable numerator, - MetricsVariable denominator) { + public void addRatio(String name, MetricsVariable numerator, MetricsVariable denominator) { Preconditions.checkArgument(numerator != null, "Numerator must not be null"); Preconditions.checkArgument(denominator != null, "Denominator must not be null"); @@ -340,8 +320,7 @@ public void markMeter(String name) { Meter meter = meters.get(name); meter.mark(); } catch (ExecutionException e) { - throw new IllegalStateException("Error retrieving meter " + name - + " from the metric registry ", e); + throw new IllegalStateException("Error retrieving meter " + name + " from the metric registry ", e); } finally { metersLock.unlock(); } @@ -355,8 +334,7 @@ private Timer getTimer(String 
name) { Timer timer = timers.get(key); return timer; } catch (ExecutionException e) { - throw new IllegalStateException("Error retrieving timer " + name - + " from the metric registry ", e); + throw new IllegalStateException("Error retrieving timer " + name + " from the metric registry ", e); } finally { timersLock.unlock(); } @@ -379,113 +357,81 @@ public MetricRegistry getMetricRegistry() { @VisibleForTesting public String dumpJson() throws Exception { - ObjectMapper jsonMapper = new ObjectMapper().registerModule( - new MetricsModule(TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS, false)); + ObjectMapper jsonMapper = + new ObjectMapper().registerModule(new MetricsModule(TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS, false)); return jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(metricRegistry); } /** - * Should be only called once to initialize the reporters + * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or HIVE_METRICS_REPORTER, if the former is not defined. + * Note: if both confs are defined, only HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used. 
 */ - private void initReporting(Set reportingSet) { - for (MetricsReporting reporting : reportingSet) { - switch(reporting) { + private void initReporting() { + + List reporterClasses = Lists.newArrayList(Splitter.on(",").trimResults().omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES))); + if (reporterClasses != null && !reporterClasses.isEmpty()) { + initReportingByClasses(reporterClasses); + return; + } + + List metricsReporterNames = Lists.newArrayList( + Splitter.on(",").trimResults().omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER))); + + if (metricsReporterNames != null && !metricsReporterNames.isEmpty()) { + initReportingByEnum(metricsReporterNames); + } + } + + private void initReportingByClasses(List reporterClasses) { + for (String reporterClass : reporterClasses) { + Class name = null; + try { + name = conf.getClassByName(reporterClass); + } catch (ClassNotFoundException e) { + LOGGER.error("Unable to instantiate metrics reporter class " + reporterClass, e); + continue; + } + try { + Constructor constructor = name.getConstructor(MetricRegistry.class, HiveConf.class); + CodahaleReporter reporter = (CodahaleReporter) constructor.newInstance(metricRegistry, conf); + reporter.start(); + reporters.add(reporter); + } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + LOGGER.error("Unable to instantiate using constructor(MetricRegistry, HiveConf) for reporter " + reporterClass, e); + } + } + } + + private void initReportingByEnum(List metricsReporterNames) { + for (String metricsReportingName : metricsReporterNames) { + MetricsReporting reporter = null; + try { + reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase()); + } catch (IllegalArgumentException e) { + LOGGER.warn("Metrics reporter skipped due to invalid configured reporter: " + metricsReportingName); + continue; + } + switch (reporter) { case CONSOLE: - 
final ConsoleReporter consoleReporter = ConsoleReporter.forRegistry(metricRegistry) - .convertRatesTo(TimeUnit.SECONDS) - .convertDurationsTo(TimeUnit.MILLISECONDS) - .build(); - consoleReporter.start(1, TimeUnit.SECONDS); + final ConsoleMetricsReporter consoleReporter = new ConsoleMetricsReporter(metricRegistry, conf); + consoleReporter.start(); reporters.add(consoleReporter); break; case JMX: - final JmxReporter jmxReporter = JmxReporter.forRegistry(metricRegistry) - .convertRatesTo(TimeUnit.SECONDS) - .convertDurationsTo(TimeUnit.MILLISECONDS) - .build(); + final JmxMetricsReporter jmxReporter = new JmxMetricsReporter(metricRegistry, conf); jmxReporter.start(); reporters.add(jmxReporter); break; case JSON_FILE: - final JsonFileReporter jsonFileReporter = new JsonFileReporter(); + final JsonFileMetricsReporter jsonFileReporter = new JsonFileMetricsReporter(metricRegistry, conf); jsonFileReporter.start(); reporters.add(jsonFileReporter); break; case HADOOP2: - String applicationName = conf.get(HiveConf.ConfVars.HIVE_METRICS_HADOOP2_COMPONENT_NAME.varname); - long reportingInterval = HiveConf.toTime( - conf.get(HiveConf.ConfVars.HIVE_METRICS_HADOOP2_INTERVAL.varname), - TimeUnit.SECONDS, TimeUnit.SECONDS); - final HadoopMetrics2Reporter metrics2Reporter = HadoopMetrics2Reporter.forRegistry(metricRegistry) - .convertRatesTo(TimeUnit.SECONDS) - .convertDurationsTo(TimeUnit.MILLISECONDS) - .build(DefaultMetricsSystem.initialize(applicationName), // The application-level name - applicationName, // Component name - applicationName, // Component description - "General"); // Name for each metric record - metrics2Reporter.start(reportingInterval, TimeUnit.SECONDS); + final Metrics2Reporter metrics2Reporter = new Metrics2Reporter(metricRegistry, conf); + metrics2Reporter.start(); + reporters.add(metrics2Reporter); break; } } } - - class JsonFileReporter implements Closeable { - private ObjectMapper jsonMapper = null; - private java.util.Timer timer = null; - - public void 
start() { - this.jsonMapper = new ObjectMapper().registerModule(new MetricsModule(TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS, false)); - this.timer = new java.util.Timer(true); - - long time = conf.getTimeVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, TimeUnit.MILLISECONDS); - final String pathString = conf.getVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION); - - timer.schedule(new TimerTask() { - @Override - public void run() { - BufferedWriter bw = null; - try { - String json = jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(metricRegistry); - Path tmpPath = new Path(pathString + ".tmp"); - URI tmpPathURI = tmpPath.toUri(); - FileSystem fs = null; - if (tmpPathURI.getScheme() == null && tmpPathURI.getAuthority() == null) { - //default local - fs = FileSystem.getLocal(conf); - } else { - fs = FileSystem.get(tmpPathURI, conf); - } - fs.delete(tmpPath, true); - bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, true))); - bw.write(json); - bw.close(); - fs.setPermission(tmpPath, FsPermission.createImmutable((short) 0644)); - - Path path = new Path(pathString); - fs.rename(tmpPath, path); - fs.setPermission(path, FsPermission.createImmutable((short) 0644)); - } catch (Exception e) { - LOGGER.warn("Error writing JSON Metrics to file", e); - } finally { - try { - if (bw != null) { - bw.close(); - } - } catch (IOException e) { - //Ignore. 
- } - } - - - } - }, 0, time); - } - - @Override - public void close() { - if (timer != null) { - this.timer.cancel(); - } - } - } } diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java new file mode 100644 index 0000000000000000000000000000000000000000..88b7a8076de97f753944909112ca4f3268471ff3 --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java @@ -0,0 +1,12 @@ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.Reporter; +import java.io.Closeable; + +public interface CodahaleReporter extends Reporter, Closeable { + + /** + * Start the reporter. + */ + public void start(); +} diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java new file mode 100644 index 0000000000000000000000000000000000000000..4aa88714d3398be03ee6ed2c81db515fb033795a --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java @@ -0,0 +1,37 @@ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.ConsoleReporter; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Reporter; +import java.io.Closeable; +import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.conf.HiveConf; + + +/** + * A wrapper around Codahale ConsoleReporter to make it a pluggable/configurable Hive Metrics reporter. 
+ */ +public class ConsoleMetricsReporter implements CodahaleReporter { + + private final ConsoleReporter reporter; + + public ConsoleMetricsReporter(MetricRegistry registry, HiveConf conf) { + + reporter = ConsoleReporter.forRegistry(registry) + .convertRatesTo(TimeUnit.SECONDS) + .convertDurationsTo(TimeUnit.MILLISECONDS) + .build(); + + } + + @Override + public void start() { + reporter.start(1, TimeUnit.SECONDS); + } + + @Override + public void close() { + reporter.close(); + } +} + diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java new file mode 100644 index 0000000000000000000000000000000000000000..bc8572c1eacddd7833bfb76610cc8c016d7117a7 --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java @@ -0,0 +1,39 @@ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.JmxReporter; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Reporter; +import java.io.Closeable; +import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.conf.HiveConf; + +/** + * A wrapper around Codahale JmxReporter to make it a pluggable/configurable Hive Metrics reporter. 
+ */ +public class JmxMetricsReporter implements CodahaleReporter { + + private final MetricRegistry registry; + private final HiveConf conf; + private final JmxReporter jmxReporter; + + public JmxMetricsReporter(MetricRegistry registry, HiveConf conf) { + this.registry = registry; + this.conf = conf; + + jmxReporter = JmxReporter.forRegistry(registry) + .convertRatesTo(TimeUnit.SECONDS) + .convertDurationsTo(TimeUnit.MILLISECONDS) + .build(); + } + + @Override + public void start() { + jmxReporter.start(); + } + + @Override + public void close() { + jmxReporter.close(); + } + +} diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java new file mode 100644 index 0000000000000000000000000000000000000000..9263e1a176fd121194536ef97c7a4202332eb3a4 --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java @@ -0,0 +1,94 @@ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Reporter; +import com.codahale.metrics.json.MetricsModule; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.BufferedWriter; +import java.io.Closeable; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.net.URI; +import java.util.TimerTask; +import java.util.concurrent.TimeUnit; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.conf.HiveConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +/** + * A metrics reporter for CodahaleMetrics that dumps metrics periodically into a file in JSON format. 
+ */ + +public class JsonFileMetricsReporter implements CodahaleReporter { + + private final MetricRegistry metricRegistry; + private final ObjectMapper jsonMapper; + private final java.util.Timer timer; + private final HiveConf conf; + + private static final Logger LOGGER = LoggerFactory.getLogger(JsonFileMetricsReporter.class); + + public JsonFileMetricsReporter(MetricRegistry registry, HiveConf conf) { + this.metricRegistry = registry; + this.jsonMapper = + new ObjectMapper().registerModule(new MetricsModule(TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS, false)); + this.timer = new java.util.Timer(true); + this.conf = conf; + } + + @Override + public void start() { + + long time = conf.getTimeVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, TimeUnit.MILLISECONDS); + final String pathString = conf.getVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION); + + timer.schedule(new TimerTask() { + @Override + public void run() { + BufferedWriter bw = null; + try { + String json = jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(metricRegistry); + Path tmpPath = new Path(pathString + ".tmp"); + URI tmpPathURI = tmpPath.toUri(); + FileSystem fs = null; + if (tmpPathURI.getScheme() == null && tmpPathURI.getAuthority() == null) { + //default local + fs = FileSystem.getLocal(conf); + } else { + fs = FileSystem.get(tmpPathURI, conf); + } + fs.delete(tmpPath, true); + bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, true))); + bw.write(json); + bw.close(); + fs.setPermission(tmpPath, FsPermission.createImmutable((short) 0644)); + + Path path = new Path(pathString); + fs.rename(tmpPath, path); + fs.setPermission(path, FsPermission.createImmutable((short) 0644)); + } catch (Exception e) { + LOGGER.warn("Error writing JSON Metrics to file", e); + } finally { + try { + if (bw != null) { + bw.close(); + } + } catch (IOException e) { + //Ignore. 
+ } + } + } + }, 0, time); + } + + @Override + public void close() { + if (timer != null) { + this.timer.cancel(); + } + } +} diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java new file mode 100644 index 0000000000000000000000000000000000000000..68f9a2666c70694d8fe47997065d2fcf9afe3d64 --- /dev/null +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java @@ -0,0 +1,45 @@ +package org.apache.hadoop.hive.common.metrics.metrics2; + +import com.codahale.metrics.MetricRegistry; +import com.github.joshelser.dropwizard.metrics.hadoop.HadoopMetrics2Reporter; +import java.io.Closeable; +import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import com.codahale.metrics.Reporter; + +/** + * A wrapper around Codahale HadoopMetrics2Reporter to make it a pluggable/configurable Hive Metrics reporter. 
+ */ +public class Metrics2Reporter implements CodahaleReporter { + + private final MetricRegistry metricRegistry; + private final HiveConf conf; + private final HadoopMetrics2Reporter reporter; + + public Metrics2Reporter(MetricRegistry registry, HiveConf conf) { + this.metricRegistry = registry; + this.conf = conf; + String applicationName = conf.get(HiveConf.ConfVars.HIVE_METRICS_HADOOP2_COMPONENT_NAME.varname); + + reporter = HadoopMetrics2Reporter.forRegistry(metricRegistry) + .convertRatesTo(TimeUnit.SECONDS) + .convertDurationsTo(TimeUnit.MILLISECONDS) + .build(DefaultMetricsSystem.initialize(applicationName), // The application-level name + applicationName, // Component name + applicationName, // Component description + "General"); // Name for each metric record + } + + @Override + public void start() { + long reportingInterval = + HiveConf.toTime(conf.get(HiveConf.ConfVars.HIVE_METRICS_HADOOP2_INTERVAL.varname), TimeUnit.SECONDS, TimeUnit.SECONDS); + reporter.start(reportingInterval, TimeUnit.SECONDS); + } + + @Override + public void close() { + reporter.close(); + } +} diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 1fb32533d58af4ec622feb320bf9315da5db6e76..c13cc78419b856f4730921550886bed2a2e49146 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -2201,19 +2201,24 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics", "org.apache.hadoop.hive.common.metrics.LegacyMetrics"), "Hive metrics subsystem implementation class."), - HIVE_METRICS_REPORTER("hive.service.metrics.reporter", "JSON_FILE, JMX", - "Reporter type for metric class org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics, " + + HIVE_CODAHALE_METRICS_REPORTER_CLASSES("hive.service.metrics.codahale.reporter.classes", + 
 "org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter, " + + "org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter", + "Comma separated list of reporter implementation classes for metric class org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics"), + HIVE_METRICS_REPORTER("hive.service.metrics.reporter", "", + "Deprecated, use HIVE_CODAHALE_METRICS_REPORTER_CLASSES instead. " + + "Reporter implementation classes for metric class org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics, " + "comma separated list of JMX, CONSOLE, JSON_FILE, HADOOP2"), HIVE_METRICS_JSON_FILE_LOCATION("hive.service.metrics.file.location", "/tmp/report.json", "For metric class org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics JSON_FILE reporter, the location of local JSON metrics file. " + "This file will get overwritten at every interval."), HIVE_METRICS_JSON_FILE_INTERVAL("hive.service.metrics.file.frequency", "5s", new TimeValidator(TimeUnit.MILLISECONDS), - "For metric class org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics JSON_FILE reporter, " + + "For metric class org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter, " + "the frequency of updating JSON metrics file."), HIVE_METRICS_HADOOP2_INTERVAL("hive.service.metrics.hadoop2.frequency", "30s", new TimeValidator(TimeUnit.SECONDS), - "For metric class org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics HADOOP2 reporter, " + + "For metric class org.apache.hadoop.hive.common.metrics.metrics2.Metrics2Reporter, " + "the frequency of updating the HADOOP2 metrics system."), HIVE_METRICS_HADOOP2_COMPONENT_NAME("hive.service.metrics.hadoop2.component", "hive", diff --git a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java index aa4e75f9f8160d1b54b14c1a23ea42e156bd45ca..d33fc30c93871f7f4379e6a2b2516b9316ebbcc5 
100644 --- a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java +++ b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java @@ -57,9 +57,10 @@ public void before() throws Exception { jsonReportFile = new File(workDir, "json_reporting"); jsonReportFile.delete(); + conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); - conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name()); + conf.setVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES, "org.apache.hadoop.hive.common.metrics.metrics2.JsonFileMetricsReporter, org.apache.hadoop.hive.common.metrics.metrics2.JmxMetricsReporter"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_LOCATION, jsonReportFile.toString()); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "100ms");