diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java index 3c988da310..6f62906a8c 100644 --- a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java +++ b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java @@ -36,6 +36,7 @@ import java.lang.management.ManagementFactory; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -202,11 +203,11 @@ public void run() { } } - private void incrementMetricsCounter(String name, long count) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + private void incrementMetricsCounter(final String name, final long count) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.incrementCounter(name, count); + metrics.get().incrementCounter(name, count); } catch (Exception e) { LOG.warn("Error Reporting JvmPauseMonitor to Metrics system", e); } diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java index d05c7289e5..79f8f377e3 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java @@ -123,11 +123,18 @@ public void open() { public void close() { if (isOpen) { Long endTime = System.currentTimeMillis(); - synchronized(metrics) { - Long num = metrics.incrementCounter(numCounter); - Long time = metrics.incrementCounter(timeCounter, endTime - startTime); - if (num != null && time != null) { - metrics.set(avgTimeCounter, Double.valueOf(time.doubleValue() / num.doubleValue())); + synchronized (metrics) { + metrics.incrementCounter(numCounter); + metrics.incrementCounter(timeCounter, endTime - startTime); + try { + Long num = (Long) metrics.get(numCounter); 
+ Long time = (Long) metrics.get(timeCounter); + if (num != null && time != null) { + metrics.set(avgTimeCounter, + Double.valueOf(time.doubleValue() / num.doubleValue())); + } + } catch (JMException e) { + LOG.warn("Unable to access metrics counters"); } } } else { @@ -174,11 +181,13 @@ public LegacyMetrics(HiveConf conf) throws Exception { mbs.registerMBean(metrics, oname); } - public Long incrementCounter(String name) { - return incrementCounter(name,Long.valueOf(1)); + @Override + public void incrementCounter(String name) { + incrementCounter(name,Long.valueOf(1)); } - public Long incrementCounter(String name, long increment) { + @Override + public void incrementCounter(String name, long increment) { Long value = null; synchronized(metrics) { if (!metrics.hasKey(name)) { @@ -194,14 +203,14 @@ public Long incrementCounter(String name, long increment) { } } } - return value; } - public Long decrementCounter(String name) { - return decrementCounter(name, Long.valueOf(1)); + @Override + public void decrementCounter(String name) { + decrementCounter(name, Long.valueOf(1)); } - public Long decrementCounter(String name, long decrement) { + public void decrementCounter(String name, long decrement) { Long value = null; synchronized(metrics) { if (!metrics.hasKey(name)) { @@ -217,7 +226,6 @@ public Long decrementCounter(String name, long decrement) { } } } - return value; } @Override diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java b/common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java index 99d3e57d84..f8c613122d 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java @@ -60,7 +60,7 @@ * @param name * @return */ - public Long incrementCounter(String name); + public void incrementCounter(String name); /** * Increments a counter of the given name by "increment" @@ -68,7 +68,7 @@ * @param increment * 
@return */ - public Long incrementCounter(String name, long increment); + public void incrementCounter(String name, long increment); /** @@ -76,7 +76,7 @@ * @param name * @return */ - public Long decrementCounter(String name); + public void decrementCounter(String name); /** * Decrements a counter of the given name by "decrement" @@ -84,7 +84,7 @@ * @param decrement * @return */ - public Long decrementCounter(String name, long decrement); + public void decrementCounter(String name, long decrement); /** diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java index b8e9a01bbe..43d5dbf18e 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java @@ -17,46 +17,44 @@ */ package org.apache.hadoop.hive.common.metrics.common; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.util.ReflectionUtils; - import java.lang.reflect.Constructor; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; + +import org.apache.hadoop.hive.conf.HiveConf; /** * Class that manages a static Metric instance for this process. */ public class MetricsFactory { - //Volatile ensures that static access returns Metrics instance in fully-initialized state. - //Alternative is to synchronize static access, which has performance penalties. - private volatile static Metrics metrics; + private static AtomicReference metrics = new AtomicReference<>(); /** * Initializes static Metrics instance. 
*/ public synchronized static void init(HiveConf conf) throws Exception { - if (metrics == null) { - Class metricsClass = conf.getClassByName( - conf.getVar(HiveConf.ConfVars.HIVE_METRICS_CLASS)); - Constructor constructor = metricsClass.getConstructor(HiveConf.class); - metrics = (Metrics) constructor.newInstance(conf); + if (metrics.get() == null) { + Class metricsClass = conf + .getClassByName(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_CLASS)); + Constructor constructor = metricsClass.getConstructor(HiveConf.class); + metrics.set((Metrics) constructor.newInstance(conf)); } } /** * Returns static Metrics instance, null if not initialized or closed. */ - public static Metrics getInstance() { - return metrics; + public static Optional getInstance() { + return Optional.ofNullable(metrics.get()); } /** * Closes and removes static Metrics instance. */ public synchronized static void close() throws Exception { - if (metrics != null) { - metrics.close(); - metrics = null; + if (metrics.get() != null) { + metrics.getAndSet(null).close(); } } } diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java index 4f35a6da60..5634548c81 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java @@ -18,11 +18,9 @@ package org.apache.hadoop.hive.common.metrics.metrics2; -import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.Counter; import com.codahale.metrics.ExponentiallyDecayingReservoir; import com.codahale.metrics.Gauge; -import com.codahale.metrics.JmxReporter; import com.codahale.metrics.Meter; import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; @@ -35,45 +33,28 @@ import com.codahale.metrics.jvm.MemoryUsageGaugeSet; import com.codahale.metrics.jvm.ThreadStatesGaugeSet; import 
com.fasterxml.jackson.databind.ObjectMapper; -import com.github.joshelser.dropwizard.metrics.hadoop.HadoopMetrics2Reporter; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.Lists; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsScope; import org.apache.hadoop.hive.common.metrics.common.MetricsVariable; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.BufferedWriter; import java.io.Closeable; -import java.io.IOException; -import java.io.OutputStreamWriter; import java.lang.management.ManagementFactory; -import java.net.URI; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; -import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; /** * Codahale-backed Metrics implementation. 
@@ -83,26 +64,22 @@ public static final Logger LOGGER = LoggerFactory.getLogger(CodahaleMetrics.class); public final MetricRegistry metricRegistry = new MetricRegistry(); - private final Lock timersLock = new ReentrantLock(); - private final Lock countersLock = new ReentrantLock(); - private final Lock gaugesLock = new ReentrantLock(); - private final Lock metersLock = new ReentrantLock(); - private LoadingCache timers; - private LoadingCache counters; - private LoadingCache meters; - private ConcurrentHashMap gauges; + private ConcurrentMap timers; + private ConcurrentMap counters; + private ConcurrentMap meters; + private ConcurrentMap> gauges; private HiveConf conf; - private final Set reporters = new HashSet(); + private final Set reporters = new HashSet<>(); - private final ThreadLocal> threadLocalScopes - = new ThreadLocal>() { - @Override - protected HashMap initialValue() { - return new HashMap(); - } - }; + private final ThreadLocal> threadLocalScopes = + new ThreadLocal>() { + @Override + protected HashMap initialValue() { + return new HashMap<>(); + } + }; public class CodahaleMetricsScope implements MetricsScope { @@ -152,38 +129,11 @@ public void close() { public CodahaleMetrics(HiveConf conf) { this.conf = conf; - //Codahale artifacts are lazily-created. 
- timers = CacheBuilder.newBuilder().build( - new CacheLoader() { - @Override - public com.codahale.metrics.Timer load(String key) { - Timer timer = new Timer(new ExponentiallyDecayingReservoir()); - metricRegistry.register(key, timer); - return timer; - } - } - ); - counters = CacheBuilder.newBuilder().build( - new CacheLoader() { - @Override - public Counter load(String key) { - Counter counter = new Counter(); - metricRegistry.register(key, counter); - return counter; - } - } - ); - meters = CacheBuilder.newBuilder().build( - new CacheLoader() { - @Override - public Meter load(String key) { - Meter meter = new Meter(); - metricRegistry.register(key, meter); - return meter; - } - } - ); - gauges = new ConcurrentHashMap(); + + timers = new ConcurrentHashMap<>(); + counters = new ConcurrentHashMap<>(); + meters = new ConcurrentHashMap<>(); + gauges = new ConcurrentHashMap<>(); //register JVM metrics registerAll("gc", new GarbageCollectorMetricSet()); @@ -199,17 +149,19 @@ public Meter load(String key) { @Override public void close() throws Exception { - if (reporters != null) { - for (Closeable reporter : reporters) { + for (Closeable reporter : reporters) { + try { reporter.close(); + } catch (Exception e) { + LOGGER.warn("Error while closing reporter {}", reporter, e); } } for (Map.Entry metric : metricRegistry.getMetrics().entrySet()) { metricRegistry.remove(metric.getKey()); } - timers.invalidateAll(); - counters.invalidateAll(); - meters.invalidateAll(); + timers.clear(); + counters.clear(); + meters.clear(); } @Override @@ -246,46 +198,35 @@ public void endScope(MetricsScope scope) { } @Override - public Long incrementCounter(String name) { - return incrementCounter(name, 1L); + public void incrementCounter(String name) { + incrementCounter(name, 1L); } @Override - public Long incrementCounter(String name, long increment) { - String key = name; - try { - countersLock.lock(); - counters.get(key).inc(increment); - return counters.get(key).getCount(); - } 
catch(ExecutionException ee) { - throw new IllegalStateException("Error retrieving counter from the metric registry ", ee); - } finally { - countersLock.unlock(); - } + public void incrementCounter(final String name, final long increment) { + counters.computeIfAbsent(name, key -> { + Counter counter = new Counter(); + metricRegistry.register(key, counter); + return counter; + }).inc(increment); } @Override - public Long decrementCounter(String name) { - return decrementCounter(name, 1L); + public void decrementCounter(String name) { + decrementCounter(name, 1L); } @Override - public Long decrementCounter(String name, long decrement) { - String key = name; - try { - countersLock.lock(); - counters.get(key).dec(decrement); - return counters.get(key).getCount(); - } catch(ExecutionException ee) { - throw new IllegalStateException("Error retrieving counter from the metric registry ", ee); - } finally { - countersLock.unlock(); + public void decrementCounter(final String name, final long decrement) { + final Counter counter = counters.get(name); + if (counter != null) { + counter.dec(decrement); } } @Override - public void addGauge(String name, final MetricsVariable variable) { - Gauge gauge = new Gauge() { + public void addGauge(String name, final MetricsVariable variable) { + Gauge gauge = new Gauge() { @Override public Object getValue() { return variable.getValue(); @@ -294,24 +235,18 @@ public Object getValue() { addGaugeInternal(name, gauge); } - @Override - public void removeGauge(String name) { - try { - gaugesLock.lock(); - gauges.remove(name); - // Metrics throws an Exception if we don't do this when the key already exists - if (metricRegistry.getGauges().containsKey(name)) { - metricRegistry.remove(name); - } - } finally { - gaugesLock.unlock(); - } + public void removeGauge(final String name) { + gauges.computeIfPresent(name, (key, value) -> { + final boolean removed = metricRegistry.remove(name); + LOGGER.debug("Gauge [{}] removed from registry: {}", key, 
removed); + return null; + }); } @Override public void addRatio(String name, MetricsVariable numerator, - MetricsVariable denominator) { + MetricsVariable denominator) { Preconditions.checkArgument(numerator != null, "Numerator must not be null"); Preconditions.checkArgument(denominator != null, "Denominator must not be null"); @@ -319,50 +254,33 @@ public void addRatio(String name, MetricsVariable numerator, addGaugeInternal(name, gauge); } - private void addGaugeInternal(String name, Gauge gauge) { - try { - gaugesLock.lock(); - gauges.put(name, gauge); - // Metrics throws an Exception if we don't do this when the key already exists - if (metricRegistry.getGauges().containsKey(name)) { - LOGGER.warn("A Gauge with name [" + name + "] already exists. " - + " The old gauge will be overwritten, but this is not recommended"); - metricRegistry.remove(name); + private void addGaugeInternal(final String name, final Gauge gauge) { + gauges.compute(name, (key, value) -> { + final boolean removed = metricRegistry.remove(name); + if (removed) { + LOGGER.warn("A Gauge with name [{}] already exists. " + + " The old gauge will be overwritten, but this is not recommended", key); } metricRegistry.register(name, gauge); - } finally { - gaugesLock.unlock(); - } + return value; + }); } @Override - public void markMeter(String name) { - String key = name; - try { - metersLock.lock(); - Meter meter = meters.get(name); - meter.mark(); - } catch (ExecutionException e) { - throw new IllegalStateException("Error retrieving meter " + name - + " from the metric registry ", e); - } finally { - metersLock.unlock(); - } + public void markMeter(final String name) { + meters.computeIfAbsent(name, key -> { + Meter meter = new Meter(); + metricRegistry.register(key, meter); + return meter; + }).mark(); } - // This method is necessary to synchronize lazy-creation to the timers. 
- private Timer getTimer(String name) { - String key = name; - try { - timersLock.lock(); - Timer timer = timers.get(key); + private Timer getTimer(final String name) { + return timers.computeIfAbsent(name, key -> { + Timer timer = new Timer(new ExponentiallyDecayingReservoir()); + metricRegistry.register(key, timer); return timer; - } catch (ExecutionException e) { - throw new IllegalStateException("Error retrieving timer " + name - + " from the metric registry ", e); - } finally { - timersLock.unlock(); - } + }); } private void registerAll(String prefix, MetricSet metricSet) { @@ -388,16 +306,23 @@ public String dumpJson() throws Exception { } /** - * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or HIVE_METRICS_REPORTER if the former is not defined. - * Note: if both confs are defined, only HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used. + * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or + * HIVE_METRICS_REPORTER if the former is not defined. Note: if both confs are + * defined, only HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used. */ private void initReporting() { - - if (!(initCodahaleMetricsReporterClasses() || initMetricsReporter())) { - LOGGER.warn("Unable to initialize metrics reporting"); + try { + initCodahaleMetricsReporterClasses(); + } catch (Exception e1) { + LOGGER.warn("Could not initiate Codahale Metrics Reporter Classes", e1); + try { + initMetricsReporter(); + } catch (Exception e2) { + LOGGER.warn("Unable to initialize metrics reporting", e2); + } } if (reporters.isEmpty()) { - // log a warning incase no reporters were successfully added + // log a warning in case no reporters were successfully added LOGGER.warn("No reporters configured for codahale metrics!"); } } @@ -406,59 +331,60 @@ private void initReporting() { * Initializes reporting using HIVE_CODAHALE_METRICS_REPORTER_CLASSES. 
* @return whether initialization was successful or not */ - private boolean initCodahaleMetricsReporterClasses() { - - List reporterClasses = Lists.newArrayList(Splitter.on(",").trimResults(). - omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES))); - if (reporterClasses.isEmpty()) { - return false; - } + private void initCodahaleMetricsReporterClasses() { + final String reporterClassesConf = + conf.getVar(HiveConf.ConfVars.HIVE_CODAHALE_METRICS_REPORTER_CLASSES); + final Iterable reporterClasses = Splitter.on(",").trimResults() + .omitEmptyStrings().split(reporterClassesConf); for (String reporterClass : reporterClasses) { - Class name = null; + Class name = null; try { name = conf.getClassByName(reporterClass); } catch (ClassNotFoundException e) { - LOGGER.error("Unable to instantiate metrics reporter class " + reporterClass + - " from conf HIVE_CODAHALE_METRICS_REPORTER_CLASSES", e); - throw new IllegalArgumentException(e); + throw new IllegalArgumentException( + "Unable to instantiate metrics reporter class " + reporterClass + + " from conf HIVE_CODAHALE_METRICS_REPORTER_CLASSES", + e); } try { - // Note: Hadoop metric reporter does not support tags. We create a single reporter for all metrics. - Constructor constructor = name.getConstructor(MetricRegistry.class, HiveConf.class); - CodahaleReporter reporter = (CodahaleReporter) constructor.newInstance(metricRegistry, conf); + // Note: Hadoop metric reporter does not support tags. We create a + // single reporter for all metrics. 
+ Constructor constructor = + name.getConstructor(MetricRegistry.class, HiveConf.class); + CodahaleReporter reporter = + (CodahaleReporter) constructor.newInstance(metricRegistry, conf); reporter.start(); reporters.add(reporter); - } catch (NoSuchMethodException | InstantiationException | - IllegalAccessException | InvocationTargetException e) { - LOGGER.error("Unable to instantiate using constructor(MetricRegistry, HiveConf) for" - + " reporter " + reporterClass + " from conf HIVE_CODAHALE_METRICS_REPORTER_CLASSES", + } catch (NoSuchMethodException | InstantiationException + | IllegalAccessException | InvocationTargetException e) { + throw new IllegalArgumentException( + "Unable to instantiate using constructor(MetricRegistry, HiveConf)" + + " for reporter " + reporterClass + + " from conf HIVE_CODAHALE_METRICS_REPORTER_CLASSES", e); - throw new IllegalArgumentException(e); } } - return true; } /** * Initializes reporting using HIVE_METRICS+REPORTER. * @return whether initialization was successful or not */ - private boolean initMetricsReporter() { + private void initMetricsReporter() { - List metricsReporterNames = Lists.newArrayList(Splitter.on(",").trimResults(). 
- omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER))); - if (metricsReporterNames.isEmpty()) { - return false; - } + Iterable metricsReporterNames = + Splitter.on(",").trimResults().omitEmptyStrings() + .split(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER)); - MetricsReporting reporter = null; for (String metricsReportingName : metricsReporterNames) { + MetricsReporting reporter = null; try { - reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase()); + reporter = + MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase()); } catch (IllegalArgumentException e) { - LOGGER.error("Invalid reporter name " + metricsReportingName, e); - throw e; + throw new RuntimeException( + "Invalid reporter name " + metricsReportingName, e); } CodahaleReporter codahaleReporter = null; switch (reporter) { @@ -476,12 +402,10 @@ private boolean initMetricsReporter() { break; default: LOGGER.warn("Unhandled reporter " + reporter + " provided."); + continue; } - if (codahaleReporter != null) { - codahaleReporter.start(); - reporters.add(codahaleReporter); - } + codahaleReporter.start(); + reporters.add(codahaleReporter); } - return true; } } diff --git a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java index 66f298c621..c1806756ea 100644 --- a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java +++ b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java @@ -23,13 +23,13 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; +import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.hive.conf.HiveConf; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedWriter; -import java.io.FileWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -39,6 +39,7 @@ import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; import java.util.Set; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -121,15 +122,26 @@ public void start() { } } - executorService = Executors.newScheduledThreadPool(1, - new ThreadFactoryBuilder().setNameFormat(JSON_REPORTER_THREAD_NAME).build()); - executorService.scheduleWithFixedDelay(this,0, interval, TimeUnit.MILLISECONDS); + executorService = + Executors.newSingleThreadScheduledExecutor(new ThreadFactoryBuilder() + .setNameFormat(JSON_REPORTER_THREAD_NAME).build()); + executorService.scheduleWithFixedDelay(this, 0, interval, + TimeUnit.MILLISECONDS); } + /** + * Waits up to 60 seconds for internal {@link ExecutorService} to close. 
Causes a new JSON file to + */ @Override public void close() { if (executorService != null) { - executorService.shutdown(); + MoreExecutors.shutdownAndAwaitTermination(executorService, 60L, + TimeUnit.SECONDS); + if (executorService.isTerminated()) { + // Dump on close in case a file has not been written in a while + run(); + } + executorService = null; } } @@ -137,15 +149,6 @@ public void close() { public void run() { Path tmpFile = null; try { - // Dump metrics to string as JSON - String json = null; - try { - json = jsonWriter.writeValueAsString(metricRegistry); - } catch (JsonProcessingException e) { - LOGGER.error("Unable to convert json to string ", e); - return; - } - // Metrics are first dumped to a temp file which is then renamed to the destination try { tmpFile = Files.createTempFile(metricsDir, "hmetrics", "json", FILE_ATTRS); @@ -163,10 +166,15 @@ public void run() { } // Write json to the temp file. - try (BufferedWriter bw = new BufferedWriter(new FileWriter(tmpFile.toFile()))) { + try (BufferedWriter bw = Files.newBufferedWriter(tmpFile)) { + final String json = jsonWriter.writeValueAsString(metricRegistry); + LOGGER.trace("JSON metrics: {}", json); bw.write(json); + } catch (JsonProcessingException e) { + LOGGER.error("Unable to convert json to string ", e); + return; } catch (IOException e) { - LOGGER.error("Unable to write to temp file " + tmpFile, e); + LOGGER.error("Unable to write to temp file: {}", tmpFile, e); return; } @@ -174,12 +182,11 @@ public void run() { try { Files.move(tmpFile, path, StandardCopyOption.ATOMIC_MOVE); } catch (Exception e) { - LOGGER.error("Unable to rename temp file {} to {}", tmpFile, path); - LOGGER.error("Exception during rename", e); + LOGGER.error("Unable to rename temp file {} to {}", tmpFile, path, e); } } catch (Throwable t) { - // catch all errors (throwable and execptions to prevent subsequent tasks from being suppressed) - LOGGER.error("Error executing scheduled task ", t); + // catch all errors (throwable 
and exceptions to prevent subsequent tasks from being suppressed) + LOGGER.error("Error executing scheduled task", t); } finally { // If something happened and we were not able to rename the temp file, attempt to remove it if (tmpFile != null && tmpFile.toFile().exists()) { @@ -187,7 +194,7 @@ public void run() { try { Files.delete(tmpFile); } catch (Exception e) { - LOGGER.error("failed to delete temporary metrics file " + tmpFile, e); + LOGGER.error("failed to delete temporary metrics file {}", tmpFile, e); } } } diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java index 2707987f0b..c5cbf183d8 100644 --- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java +++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java @@ -30,6 +30,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.Optional; /** * PerfLogger. @@ -230,20 +231,21 @@ public Long getDuration(String method) { transient Map openScopes = new HashMap(); private void beginMetrics(String method) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - MetricsScope scope = metrics.createScope(MetricsConstant.API_PREFIX + method); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + MetricsScope scope = + metrics.get().createScope(MetricsConstant.API_PREFIX + method); openScopes.put(method, scope); } } private void endMetrics(String method) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - MetricsScope scope = openScopes.remove(method); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + final MetricsScope scope = openScopes.remove(method); if (scope != null) { - metrics.endScope(scope); + metrics.get().endScope(scope); } } } @@ -252,10 +254,10 @@ private void endMetrics(String method) { * Cleans up any dangling perfLog metric call scopes. 
*/ public void cleanupPerfLogMetrics() { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { for (MetricsScope openScope : openScopes.values()) { - metrics.endScope(openScope); + metrics.get().endScope(openScope); } } openScopes.clear(); diff --git a/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java b/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java index 88a4c73006..515ce1b518 100644 --- a/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java +++ b/common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java @@ -64,13 +64,4 @@ public static JsonNode getJsonNode(String json, MetricsCategory category, String JsonNode metricsNode = categoryNode.path(metricsName); return metricsNode.path(category.metricsHandle); } - - public static byte[] getFileData(String path, int timeoutInterval, int tries) throws Exception { - File file = new File(path); - do { - Thread.sleep(timeoutInterval); - tries--; - } while (tries > 0 && !file.exists()); - return Files.readAllBytes(Paths.get(path)); - } } diff --git a/common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java b/common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java index 1d477f6a47..774b5e7c2d 100644 --- a/common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java +++ b/common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java @@ -49,7 +49,7 @@ public void before() throws Exception { HiveConf conf = new HiveConf(); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, LegacyMetrics.class.getCanonicalName()); MetricsFactory.init(conf); - metrics = (LegacyMetrics) MetricsFactory.getInstance(); + metrics = (LegacyMetrics) MetricsFactory.getInstance().get(); } @After diff --git 
a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java index 1c49d9575f..a4aeeeddc7 100644 --- a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java +++ b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java @@ -79,7 +79,8 @@ public static void setUp() throws Exception { TimeUnit.MILLISECONDS); MetricsFactory.init(conf); - metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); + metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance().get()) + .getMetricRegistry(); } @AfterClass @@ -96,8 +97,8 @@ public static void cleanup() { public void testScope() throws Exception { int runs = 5; for (int i = 0; i < runs; i++) { - MetricsFactory.getInstance().startStoredScope("method1"); - MetricsFactory.getInstance().endStoredScope("method1"); + MetricsFactory.getInstance().get().startStoredScope("method1"); + MetricsFactory.getInstance().get().endStoredScope("method1"); Timer timer = metricRegistry.getTimers().get("method1"); Assert.assertEquals(i + 1, timer.getCount()); } @@ -111,7 +112,7 @@ public void testScope() throws Exception { public void testCount() throws Exception { int runs = 5; for (int i = 0; i < runs; i++) { - MetricsFactory.getInstance().incrementCounter("count1"); + MetricsFactory.getInstance().get().incrementCounter("count1"); Counter counter = metricRegistry.getCounters().get("count1"); Assert.assertEquals(i + 1, counter.getCount()); } @@ -126,8 +127,8 @@ public void testConcurrency() throws Exception { executorService.submit(new Callable() { @Override public Void call() throws Exception { - MetricsFactory.getInstance().startStoredScope("method2"); - MetricsFactory.getInstance().endStoredScope("method2"); + MetricsFactory.getInstance().get().startStoredScope("method2"); + 
MetricsFactory.getInstance().get().endStoredScope("method2"); return null; } }); @@ -155,7 +156,7 @@ public void testFileReporting() throws Exception { int runs = 5; String counterName = "count2"; for (int i = 0; i < runs; i++) { - MetricsFactory.getInstance().incrementCounter(counterName); + MetricsFactory.getInstance().get().incrementCounter(counterName); sleep(REPORT_INTERVAL_MS + REPORT_INTERVAL_MS / 2); Assert.assertEquals(i + 1, getCounterValue(counterName)); } @@ -178,29 +179,35 @@ public void testGauge() throws Exception { TestMetricsVariable testVar = new TestMetricsVariable(); testVar.setValue(20); - MetricsFactory.getInstance().addGauge("gauge1", testVar); - String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "gauge1", testVar.getValue()); - + MetricsFactory.getInstance().get().addGauge("gauge1", testVar); + String json = + ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "gauge1", + testVar.getValue()); testVar.setValue(40); - json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "gauge1", testVar.getValue()); + json = ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "gauge1", + testVar.getValue()); } @Test public void testMeter() throws Exception { - String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", ""); + String json = + ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", + ""); - MetricsFactory.getInstance().markMeter("meter"); - json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - 
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", "1"); + MetricsFactory.getInstance().get().markMeter("meter"); + json = ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", + "1"); - MetricsFactory.getInstance().markMeter("meter"); - json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", "2"); + MetricsFactory.getInstance().get().markMeter("meter"); + json = ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.METER, "meter", + "2"); } /** diff --git a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java index e89a605bb3..4fec30433c 100644 --- a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java +++ b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java @@ -20,12 +20,15 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.lang.reflect.InvocationTargetException; +import java.nio.file.Files; +import java.nio.file.Paths; + import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.hive.common.metrics.MetricsTestUtils; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.junit.After; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; import java.io.File; @@ -35,8 +38,14 @@ */ public class TestCodahaleReportersConf { - private static File workDir = new File(System.getProperty("test.tmp.dir")); - private static File jsonReportFile; + private final File workDir = new 
File(System.getProperty("test.tmp.dir")); + private File jsonReportFile; + + @Before + public void setup() { + jsonReportFile = new File(workDir, "json_reporting"); + jsonReportFile.delete(); + } @After public void after() throws Exception { @@ -51,9 +60,6 @@ public void testFallbackToDeprecatedConfig() throws Exception { HiveConf conf = new HiveConf(); - jsonReportFile = new File(workDir, "json_reporting"); - jsonReportFile.delete(); - conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, "JMX, JSON"); @@ -64,11 +70,14 @@ public void testFallbackToDeprecatedConfig() throws Exception { int runs = 5; for (int i = 0; i < runs; i++) { - MetricsFactory.getInstance().incrementCounter("count2"); + MetricsFactory.getInstance().get().incrementCounter("count2"); } + MetricsFactory.getInstance().get().close(); + // we expect json file to be updated - byte[] jsonData = MetricsTestUtils.getFileData(jsonReportFile.getAbsolutePath(), 2000, 3); + byte[] jsonData = + Files.readAllBytes(Paths.get(jsonReportFile.getAbsolutePath())); ObjectMapper objectMapper = new ObjectMapper(); JsonNode rootNode = objectMapper.readTree(jsonData); @@ -90,9 +99,6 @@ public void testNoFallback() throws Exception { HiveConf conf = new HiveConf(); - jsonReportFile = new File(workDir, "json_reporting"); - jsonReportFile.delete(); - conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, "JMX, JSON"); @@ -105,7 +111,7 @@ public void testNoFallback() throws Exception { int runs = 5; for (int i = 0; i < runs; i++) { - MetricsFactory.getInstance().incrementCounter("count2"); + MetricsFactory.getInstance().get().incrementCounter("count2"); } Assert.assertFalse(jsonReportFile.exists()); @@ 
-123,9 +129,6 @@ public void testNoFallbackOnIncorrectConf() throws Exception { HiveConf conf = new HiveConf(); - jsonReportFile = new File(workDir, "json_reporting"); - jsonReportFile.delete(); - conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, "local"); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_CLASS, CodahaleMetrics.class.getCanonicalName()); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, "JMX, JSON"); @@ -136,8 +139,10 @@ public void testNoFallbackOnIncorrectConf() throws Exception { try { MetricsFactory.init(conf); - } catch (InvocationTargetException expectedException) { - + } catch (InvocationTargetException e) { + Assert.fail(); + } finally { + MetricsFactory.close(); } Assert.assertFalse(jsonReportFile.exists()); diff --git a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java index 3166654a5b..fca4fdd640 100644 --- a/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java +++ b/common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestMetricVariableRatioGauge.java @@ -41,7 +41,8 @@ public void before() throws Exception { conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "60000m"); MetricsFactory.init(conf); - metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); + metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance().get()) + .getMetricRegistry(); } @After @@ -54,8 +55,9 @@ public void testRatioIsCalculated() throws Exception { NumericVariable num = new NumericVariable(10); NumericVariable ord = new NumericVariable(5); - MetricsFactory.getInstance().addRatio("rat", num, ord); - String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); + MetricsFactory.getInstance().get().addRatio("rat", num, ord); + String json = + ((CodahaleMetrics) 
MetricsFactory.getInstance().get()).dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", 2d); } @@ -64,19 +66,23 @@ public void testRatioIsCalculatedNonExact() throws Exception { NumericVariable num = new NumericVariable(20); NumericVariable ord = new NumericVariable(3); - MetricsFactory.getInstance().addRatio("rat", num, ord); - String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", 6.6666d, 1e-4); + MetricsFactory.getInstance().get().addRatio("rat", num, ord); + String json = + ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", + 6.6666d, 1e-4); } @Test(expected = IllegalArgumentException.class) public void testMissingNumeratorRatio() throws Exception { - MetricsFactory.getInstance().addRatio("rat", null, new NumericVariable(5)); + MetricsFactory.getInstance().get().addRatio("rat", null, + new NumericVariable(5)); } @Test(expected = IllegalArgumentException.class) public void testMissingDenominatorRatio() throws Exception { - MetricsFactory.getInstance().addRatio("rat", new NumericVariable(5), null); + MetricsFactory.getInstance().get().addRatio("rat", new NumericVariable(5), + null); } @Test @@ -84,9 +90,11 @@ public void testEmptyRatio() throws Exception { NumericVariable num = new NumericVariable(null); NumericVariable ord = new NumericVariable(null); - MetricsFactory.getInstance().addRatio("rat", num, ord); - String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", "NaN"); + MetricsFactory.getInstance().get().addRatio("rat", num, ord); + String json = + ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", + "NaN"); } @Test @@ -94,9 +102,11 @@ public void testZeroRatio() 
throws Exception { NumericVariable num = new NumericVariable(10); NumericVariable ord = new NumericVariable(0); - MetricsFactory.getInstance().addRatio("rat", num, ord); - String json = ((CodahaleMetrics) MetricsFactory.getInstance()).dumpJson(); - MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", "NaN"); + MetricsFactory.getInstance().get().addRatio("rat", num, ord); + String json = + ((CodahaleMetrics) MetricsFactory.getInstance().get()).dumpJson(); + MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.GAUGE, "rat", + "NaN"); } private class NumericVariable implements MetricsVariable { diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsBinary.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsBinary.java index 6677b45a73..1b1dbf3806 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsBinary.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsBinary.java @@ -52,7 +52,8 @@ public static void tearDown() { @Test public void testOpenConnectionMetrics() throws Exception { - CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = + (CodahaleMetrics) MetricsFactory.getInstance().get(); String[] beelineArgs = { "-u", miniHS2.getBaseJdbcURL() + "default", "-n", USERNAME, diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsHttp.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsHttp.java index 5852535a7a..3e5ff0b271 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsHttp.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2ConnectionMetricsHttp.java @@ -61,7 +61,8 @@ public static void tearDown() { @Test public void testOpenConnectionMetrics() throws Exception { - 
CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = + (CodahaleMetrics) MetricsFactory.getInstance().get(); TCLIService.Client httpClient = getHttpClient(); TOpenSessionReq openSessionReq = new TOpenSessionReq(); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java index d6631729d1..ffbf5a96f0 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHs2Metrics.java @@ -52,7 +52,7 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException { try { - CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance().get(); String json = metrics.dumpJson(); //Pre-analyze hook is fired in the middle of these calls MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, "active_calls_api_semanticAnalyze", 1); @@ -100,7 +100,8 @@ public void testMetrics() throws Exception { serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tableName + " (id INT)", confOverlay); //check that all calls were recorded. 
- CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = + (CodahaleMetrics) MetricsFactory.getInstance().get(); String json = metrics.dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_INITIALIZED", 1); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_hs2_operation_PENDING", 1); @@ -134,7 +135,8 @@ public void testClosedScopes() throws Exception { Assert.assertNotNull("Expected semantic exception", expectedException); //verify all scopes were recorded - CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = + (CodahaleMetrics) MetricsFactory.getInstance().get(); String json = metrics.dumpJson(); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_parse", 1); MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.TIMER, "api_semanticAnalyze", 1); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 91910d1c0c..ac922d495b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -33,6 +33,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Queue; import java.util.Set; import java.util.stream.Collectors; @@ -1314,9 +1315,9 @@ public void lockAndRespond() throws CommandProcessorException { } private void compileInternal(String command, boolean deferClose) throws CommandProcessorException { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - metrics.incrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + metrics.get().incrementCounter(MetricsConstant.WAITING_COMPILE_OPS); } PerfLogger perfLogger = SessionState.getPerfLogger(true); @@ -1327,8 +1328,8 @@ private void 
compileInternal(String command, boolean deferClose) throws CommandP perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.WAIT_COMPILE); - if (metrics != null) { - metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1); + if (metrics.isPresent()) { + metrics.get().decrementCounter(MetricsConstant.WAITING_COMPILE_OPS); } if (!success) { String errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg(); @@ -1671,7 +1672,7 @@ private void execute() throws CommandProcessorException { } else { maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH); } - Metrics metrics = MetricsFactory.getInstance(); + final Optional metrics = MetricsFactory.getInstance(); String queryId = plan.getQueryId(); // Get the query string from the conf file as the compileInternal() method might @@ -1768,8 +1769,8 @@ private void execute() throws CommandProcessorException { assert tsk.getParentTasks() == null || tsk.getParentTasks().isEmpty(); driverCxt.addToRunnable(tsk); - if (metrics != null) { - tsk.updateTaskMetrics(metrics); + if (metrics.isPresent()) { + tsk.updateTaskMetrics(metrics.get()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java b/ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java index 54c2425135..9ce402cff4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/cache/results/QueryResultsCache.java @@ -30,6 +30,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.ExecutorService; @@ -397,9 +398,9 @@ public static void initialize(HiveConf conf) throws IOException { try { instance = new QueryResultsCache(conf); - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - registerMetrics(metrics, instance); + final Optional metrics = MetricsFactory.getInstance(); + if 
(metrics.isPresent()) { + registerMetrics(metrics.get(), instance); } } catch (Exception err) { inited.set(false); @@ -908,16 +909,16 @@ public void run() { } public static void incrementMetric(String name, long count) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - metrics.incrementCounter(name, count); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + metrics.get().incrementCounter(name, count); } } public static void decrementMetric(String name, long count) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - metrics.decrementCounter(name, count); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + metrics.get().decrementCounter(name, count); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmPoolMetrics.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmPoolMetrics.java index 19b035e7cb..d751d17968 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmPoolMetrics.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/WmPoolMetrics.java @@ -23,6 +23,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -97,8 +98,11 @@ public void initAfterRegister() { } // Set up codahale if enabled; we cannot tag the values so just prefix them for the JMX view. - Metrics chMetrics = MetricsFactory.getInstance(); - if (!(chMetrics instanceof CodahaleMetrics)) return; + final Optional chMetrics = MetricsFactory.getInstance(); + if (!chMetrics.isPresent() + || !(chMetrics.get() instanceof CodahaleMetrics)) { + return; + } List codahaleNames = new ArrayList<>(); for (Map.Entry e : allMetrics.entrySet()) { @@ -112,7 +116,7 @@ public void initAfterRegister() { if (var == null) continue; // Unexpected metric type. 
String name = "WM_" + poolName + "_" + e.getKey(); codahaleNames.add(name); - chMetrics.addGauge(name, var); + chMetrics.get().addGauge(name, var); } this.codahaleGaugeNames = codahaleNames; } @@ -177,9 +181,11 @@ public void destroy() { ms.unregisterSource(sourceName); ms = null; if (codahaleGaugeNames != null) { - Metrics metrics = MetricsFactory.getInstance(); - for (String chgName : codahaleGaugeNames) { - metrics.removeGauge(chgName); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + for (String chgName : codahaleGaugeNames) { + metrics.get().removeGauge(chgName); + } } codahaleGaugeNames = null; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/MetricsQueryLifeTimeHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/MetricsQueryLifeTimeHook.java index c864d5e60c..9c968899f8 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/MetricsQueryLifeTimeHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/MetricsQueryLifeTimeHook.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hive.ql.hooks; +import java.util.Optional; + import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -28,35 +30,37 @@ */ public class MetricsQueryLifeTimeHook implements QueryLifeTimeHook { - private Metrics metrics = MetricsFactory.getInstance(); + private Optional metrics = MetricsFactory.getInstance(); private MetricsScope compilingQryScp; private MetricsScope executingQryScp; @Override public void beforeCompile(QueryLifeTimeHookContext ctx) { - if (metrics != null) { - compilingQryScp = metrics.createScope(MetricsConstant.HS2_COMPILING_QUERIES); + if (metrics.isPresent()) { + compilingQryScp = + metrics.get().createScope(MetricsConstant.HS2_COMPILING_QUERIES); } } @Override public void afterCompile(QueryLifeTimeHookContext ctx, boolean hasError) { - if (metrics != null && 
compilingQryScp != null) { - metrics.endScope(compilingQryScp); + if (metrics.isPresent() && compilingQryScp != null) { + metrics.get().endScope(compilingQryScp); } } @Override public void beforeExecution(QueryLifeTimeHookContext ctx) { - if (metrics != null) { - executingQryScp = metrics.createScope(MetricsConstant.HS2_EXECUTING_QUERIES); + if (metrics.isPresent()) { + executingQryScp = + metrics.get().createScope(MetricsConstant.HS2_EXECUTING_QUERIES); } } @Override public void afterExecution(QueryLifeTimeHookContext ctx, boolean hasError) { - if (metrics != null && executingQryScp != null) { - metrics.endScope(executingQryScp); + if (metrics.isPresent() && executingQryScp != null) { + metrics.get().endScope(executingQryScp); } } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java index a8b9653411..4f89a95f5b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java @@ -38,6 +38,7 @@ import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -157,10 +158,10 @@ else if(l.txnId == 0) { } acquiredLocks.add(hl); - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.incrementCounter(MetricsConstant.METASTORE_HIVE_LOCKS); + metrics.get().incrementCounter(MetricsConstant.METASTORE_HIVE_LOCKS); } catch (Exception e) { LOG.warn("Error Reporting hive client metastore lock operation to Metrics system", e); } @@ -218,10 +219,10 @@ public void unlock(HiveLock hiveLock) throws LockException { txnManager.getMS().unlock(lockId); //important to remove after unlock() in case it fails removed = locks.remove(hiveLock); - Metrics metrics = MetricsFactory.getInstance(); - if 
(metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.decrementCounter(MetricsConstant.METASTORE_HIVE_LOCKS); + metrics.get().decrementCounter(MetricsConstant.METASTORE_HIVE_LOCKS); } catch (Exception e) { LOG.warn("Error Reporting hive client metastore unlock operation to Metrics system", e); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java index 0610758444..b4cd84c210 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java @@ -45,6 +45,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Queue; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -449,23 +450,28 @@ private ZooKeeperHiveLock lockPrimitive(HiveLockObject key, return null; } } - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - switch(mode) { + switch (mode) { case EXCLUSIVE: - metrics.incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_EXCLUSIVELOCKS); + metrics.get() + .incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_EXCLUSIVELOCKS); break; case SEMI_SHARED: - metrics.incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SEMISHAREDLOCKS); + metrics.get() + .incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SEMISHAREDLOCKS); break; default: - metrics.incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS); + metrics.get() + .incrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS); break; } } catch (Exception e) { - LOG.warn("Error Reporting hive client zookeeper lock operation to Metrics system", e); + LOG.warn( + "Error Reporting hive client zookeeper lock operation to 
Metrics system", + e); } } return new ZooKeeperHiveLock(res, key, mode); @@ -529,18 +535,21 @@ static void unlockPrimitive(HiveLock hiveLock, String parent, CuratorFramework c curatorFramework.delete().forPath(name); } } - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - switch(lMode) { + switch (lMode) { case EXCLUSIVE: - metrics.decrementCounter(MetricsConstant.ZOOKEEPER_HIVE_EXCLUSIVELOCKS); + metrics.get().decrementCounter( + MetricsConstant.ZOOKEEPER_HIVE_EXCLUSIVELOCKS); break; case SEMI_SHARED: - metrics.decrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SEMISHAREDLOCKS); + metrics.get().decrementCounter( + MetricsConstant.ZOOKEEPER_HIVE_SEMISHAREDLOCKS); break; default: - metrics.decrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS); + metrics.get() + .decrementCounter(MetricsConstant.ZOOKEEPER_HIVE_SHAREDLOCKS); break; } } catch (Exception e) { diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java b/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java index 5921044fea..56fed0a294 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java @@ -355,7 +355,7 @@ private void verifyThatWaitingCompileOpsCountIsEqualTo(long count) { } private Counter getCounter(String counter) { - CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance().get(); SortedMap counters = metrics.getMetricRegistry().getCounters(); assertNotNull(counters); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java index beabd94306..06328bb91d 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestMetricsQueryLifeTimeHook.java @@ -50,7 +50,8 @@ public void before() throws Exception { conf.setVar(HiveConf.ConfVars.HIVE_METRICS_JSON_FILE_INTERVAL, "100000s"); MetricsFactory.init(conf); - metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance()).getMetricRegistry(); + metricRegistry = ((CodahaleMetrics) MetricsFactory.getInstance().get()) + .getMetricRegistry(); hook = new MetricsQueryLifeTimeHook(); ctx = new QueryLifeTimeHookContextImpl(); @@ -107,7 +108,7 @@ public void testNoErrorOnDisabledMetrics() throws Exception { MetricsFactory.close(); MetricsQueryLifeTimeHook emptyhook = new MetricsQueryLifeTimeHook(); - assertThat(MetricsFactory.getInstance(), nullValue()); + assertThat(MetricsFactory.getInstance().orElse(null), nullValue()); emptyhook.beforeCompile(ctx); emptyhook.afterCompile(ctx, false); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java index 4482f86dc0..17c757ac9c 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java @@ -133,7 +133,7 @@ public void testMetrics() throws Exception{ conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name()); MetricsFactory.init(conf); - CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance().get(); HiveLockManagerCtx ctx = new HiveLockManagerCtx(conf); ZooKeeperHiveLockManager zMgr= new ZooKeeperHiveLockManager(); diff --git a/service/src/java/org/apache/hive/service/cli/operation/Operation.java b/service/src/java/org/apache/hive/service/cli/operation/Operation.java index 
5036d5907c..77b070a5e7 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/Operation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/Operation.java @@ -21,6 +21,7 @@ import java.util.EnumSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; @@ -261,9 +262,9 @@ protected void afterRun() { public void run() throws HiveSQLException { beforeRun(); try { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - metrics.incrementCounter(MetricsConstant.OPEN_OPERATIONS); + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + metrics.get().incrementCounter(MetricsConstant.OPEN_OPERATIONS); } runInternal(); } finally { @@ -376,19 +377,21 @@ protected HiveSQLException toSQLException(String prefix, CommandProcessorExcepti OperationState.UNKNOWN ); - protected final MetricsScope updateOperationStateMetrics(MetricsScope stateScope, String operationPrefix, + protected final MetricsScope updateOperationStateMetrics( + MetricsScope stateScope, String operationPrefix, String completedOperationPrefix, OperationState state) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { if (stateScope != null) { - metrics.endScope(stateScope); + metrics.get().endScope(stateScope); stateScope = null; } if (scopeStates.contains(state)) { - stateScope = metrics.createScope(MetricsConstant.API_PREFIX + operationPrefix + state); + stateScope = metrics.get() + .createScope(MetricsConstant.API_PREFIX + operationPrefix + state); } if (terminalStates.contains(state)) { - metrics.incrementCounter(completedOperationPrefix + state); + metrics.get().incrementCounter(completedOperationPrefix + state); } } return stateScope; diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java index 506ffe6721..88d561069d 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -27,6 +27,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -241,10 +242,10 @@ private Operation removeTimedOutOperation(OperationHandle operationHandle) { Operation operation = handleToOperation.get(operationHandle); if (operation != null && operation.isTimedOut(System.currentTimeMillis())) { LOG.info("Operation is timed out,operation=" + operation.getHandle() + ",state=" + operation.getState().toString()); - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.decrementCounter(MetricsConstant.OPEN_OPERATIONS); + metrics.get().decrementCounter(MetricsConstant.OPEN_OPERATIONS); } catch (Exception e) { LOG.warn("Error decrementing open_operations metric, reported values may be incorrect", e); } @@ -310,10 +311,10 @@ public void cancelOperation(OperationHandle opHandle) throws HiveSQLException { public void closeOperation(OperationHandle opHandle) throws HiveSQLException { LOG.info("Closing operation: " + opHandle); Operation operation = removeOperation(opHandle); - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.decrementCounter(MetricsConstant.OPEN_OPERATIONS); + metrics.get().decrementCounter(MetricsConstant.OPEN_OPERATIONS); } catch (Exception e) { LOG.warn("Error Reporting close operation to Metrics system", e); } diff --git 
a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index b87b670652..1c469c9742 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -29,6 +29,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Properties; import java.util.concurrent.Future; import java.util.concurrent.RejectedExecutionException; @@ -123,9 +124,10 @@ public SQLOperation(HiveSession parentSession, String statement, Map metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + submittedQryScp = + metrics.get().createScope(MetricsConstant.HS2_SUBMITTED_QURIES); } } @@ -615,23 +617,23 @@ protected void onNewState(OperationState state, OperationState prevState) { MetricsConstant.SQL_OPERATION_PREFIX, MetricsConstant.COMPLETED_SQL_OPERATION_PREFIX, state); - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { // New state is changed to running from something else (user is active) if (state == OperationState.RUNNING && prevState != state) { - incrementUserQueries(metrics); + incrementUserQueries(metrics.get()); } // New state is not running (user not active) any more if (prevState == OperationState.RUNNING && prevState != state) { - decrementUserQueries(metrics); + decrementUserQueries(metrics.get()); } } if (state == OperationState.FINISHED || state == OperationState.CANCELED || state == OperationState.ERROR) { //update runtime queryInfo.setRuntime(getOperationComplete() - getOperationStart()); - if (metrics != null && submittedQryScp != null) { - metrics.endScope(submittedQryScp); + if (metrics.isPresent() && submittedQryScp != null) { + metrics.get().endScope(submittedQryScp); } } @@ -642,11 +644,17 @@ 
protected void onNewState(OperationState state, OperationState prevState) { queryInfo.updateState(state.toString()); } - if (state == OperationState.ERROR) { - markQueryMetric(MetricsFactory.getInstance(), MetricsConstant.HS2_FAILED_QUERIES); - } - if (state == OperationState.FINISHED) { - markQueryMetric(MetricsFactory.getInstance(), MetricsConstant.HS2_SUCCEEDED_QUERIES); + if (metrics.isPresent()) { + switch (state) { + case ERROR: + metrics.get().markMeter(MetricsConstant.HS2_FAILED_QUERIES); + break; + case FINISHED: + metrics.get().markMeter(MetricsConstant.HS2_SUCCEEDED_QUERIES); + break; + default: + break; + } } } @@ -682,12 +690,6 @@ private void decrementUserQueries(Metrics metrics) { } } - private void markQueryMetric(Metrics metric, String name) { - if(metric != null) { - metric.markMeter(name); - } - } - public String getExecutionEngine() { return queryState.getConf().getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE); } diff --git a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java index 277519cba5..c4c4701b20 100644 --- a/service/src/java/org/apache/hive/service/cli/session/SessionManager.java +++ b/service/src/java/org/apache/hive/service/cli/session/SessionManager.java @@ -27,6 +27,7 @@ import java.util.Date; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Future; @@ -115,10 +116,10 @@ public synchronized void init(HiveConf hiveConf) { createBackgroundOperationPool(); addService(operationManager); initSessionImplClassName(); - Metrics metrics = MetricsFactory.getInstance(); - if(metrics != null){ - registerOpenSesssionMetrics(metrics); - registerActiveSesssionMetrics(metrics); + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + registerOpenSesssionMetrics(metrics.get()); + 
registerActiveSesssionMetrics(metrics.get()); } userLimit = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER); @@ -215,20 +216,22 @@ private void createBackgroundOperationPool() { checkOperation = HiveConf.getBoolVar(hiveConf, ConfVars.HIVE_SERVER2_IDLE_SESSION_CHECK_OPERATION); - Metrics m = MetricsFactory.getInstance(); - if (m != null) { - m.addGauge(MetricsConstant.EXEC_ASYNC_QUEUE_SIZE, new MetricsVariable() { - @Override - public Object getValue() { - return queue.size(); - } - }); - m.addGauge(MetricsConstant.EXEC_ASYNC_POOL_SIZE, new MetricsVariable() { - @Override - public Object getValue() { - return backgroundOperationPool.getPoolSize(); - } - }); + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + metrics.get().addGauge(MetricsConstant.EXEC_ASYNC_QUEUE_SIZE, + new MetricsVariable() { + @Override + public Object getValue() { + return queue.size(); + } + }); + metrics.get().addGauge(MetricsConstant.EXEC_ASYNC_POOL_SIZE, + new MetricsVariable() { + @Override + public Object getValue() { + return backgroundOperationPool.getPoolSize(); + } + }); } } @@ -294,9 +297,10 @@ public void run() { } catch (HiveSQLException e) { LOG.warn("Exception is thrown closing session " + handle, e); } finally { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { - metrics.incrementCounter(MetricsConstant.HS2_ABANDONED_SESSIONS); + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { + metrics.get() + .incrementCounter(MetricsConstant.HS2_ABANDONED_SESSIONS); } } } else { diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index df2d3a7b71..c1df07b1aa 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -20,6 +20,7 @@ 
import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.TimeUnit; @@ -107,11 +108,12 @@ protected void initServer() { server.setServerEventHandler(new TServerEventHandler() { @Override public ServerContext createContext(TProtocol input, TProtocol output) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS); - metrics.incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT); + metrics.get().incrementCounter(MetricsConstant.OPEN_CONNECTIONS); + metrics.get().incrementCounter( + MetricsConstant.CUMULATIVE_CONNECTION_COUNT); } catch (Exception e) { LOG.warn("Error Reporting JDO operation to Metrics system", e); } @@ -121,10 +123,10 @@ public ServerContext createContext(TProtocol input, TProtocol output) { @Override public void deleteContext(ServerContext serverContext, TProtocol input, TProtocol output) { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS); + metrics.get().decrementCounter(MetricsConstant.OPEN_CONNECTIONS); } catch (Exception e) { LOG.warn("Error Reporting JDO operation to Metrics system", e); } diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index 89271d7020..5d580a4b3d 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -19,6 +19,7 @@ package org.apache.hive.service.cli.thrift; import java.util.Arrays; +import 
java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.TimeUnit; @@ -199,11 +200,12 @@ public void onClosed(Connection connection) { } private void openConnection() { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.incrementCounter(MetricsConstant.OPEN_CONNECTIONS); - metrics.incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT); + metrics.get().incrementCounter(MetricsConstant.OPEN_CONNECTIONS); + metrics.get() + .incrementCounter(MetricsConstant.CUMULATIVE_CONNECTION_COUNT); } catch (Exception e) { LOG.warn("Error reporting HS2 open connection operation to Metrics system", e); } @@ -211,10 +213,10 @@ private void openConnection() { } private void closeConnection() { - Metrics metrics = MetricsFactory.getInstance(); - if (metrics != null) { + final Optional<Metrics> metrics = MetricsFactory.getInstance(); + if (metrics.isPresent()) { try { - metrics.decrementCounter(MetricsConstant.OPEN_CONNECTIONS); + metrics.get().decrementCounter(MetricsConstant.OPEN_CONNECTIONS); } catch (Exception e) { LOG.warn("Error reporting HS2 close connection operation to Metrics system", e); } diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 17570f73bc..7ccb18ce54 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -868,7 +868,7 @@ public synchronized void stop() { } } // Shutdown Metrics - if (MetricsFactory.getInstance() != null) { + if (MetricsFactory.getInstance().isPresent()) { try { MetricsFactory.close(); } catch (Exception e) { diff --git a/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java 
b/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java index df538c607e..6153b9312a 100644 --- a/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java +++ b/service/src/test/org/apache/hive/service/cli/operation/TestSQLOperationMetrics.java @@ -55,12 +55,12 @@ public void setup() throws Exception { operation = new SQLOperation(session, "select * from dummy", Maps.newHashMap(), false, 0L); - metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + metrics = (CodahaleMetrics) MetricsFactory.getInstance().get(); } @After public void tearDown() throws Exception { - MetricsFactory.getInstance().close(); + MetricsFactory.getInstance().get().close(); } @Test diff --git a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java index be8d70b56a..390a627005 100644 --- a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java +++ b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java @@ -82,7 +82,7 @@ public void setup() throws Exception { sm = new SessionManager(null, true); sm.init(conf); - metrics = (CodahaleMetrics) MetricsFactory.getInstance(); + metrics = (CodahaleMetrics) MetricsFactory.getInstance().get(); Hive doNothingHive = mock(Hive.class); Hive.set(doNothingHive);