diff --git standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java index fb0b2fe6fb..a3cb5c3143 100644 --- standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java +++ standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java @@ -673,8 +673,15 @@ public static ConfVars getMetaConf(String name) { "hive.service.metrics.file.location", "/tmp/report.json", "For metric class json metric reporter, the location of local JSON metrics file. " + "This file will get overwritten at every interval."), + METRICS_SLF4J_LOG_FREQUENCY_MINS("metastore.metrics.slf4j.frequency", + "hive.service.metrics.slf4j.frequency", 5, TimeUnit.MINUTES, + "For SLF4J metric reporter, the frequency of logging metrics events. The default value is 5 mins."), + METRICS_SLF4J_LOG_LEVEL("metastore.metrics.slf4j.logging.level", + "hive.service.metrics.slf4j.logging.level", "INFO", + new StringSetValidator("TRACE", "DEBUG", "INFO", "WARN", "ERROR"), + "For SLF4J metric reporter, the logging level to be used for metrics event logs. 
The default level is INFO."), METRICS_REPORTERS("metastore.metrics.reporters", "metastore.metrics.reporters", "json,jmx", - new StringSetValidator("json", "jmx", "console", "hadoop"), + new StringSetValidator("json", "jmx", "console", "hadoop", "slf4j"), "A comma separated list of metrics reporters to start"), MSCK_PATH_VALIDATION("msck.path.validation", "hive.msck.path.validation", "throw", new StringSetValidator("throw", "skip", "ignore"), "The approach msck should take with HDFS " + diff --git standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java index e4866a6767..53ee84a325 100644 --- standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java +++ standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/metrics/Metrics.java @@ -17,6 +17,14 @@ */ package org.apache.hadoop.hive.metastore.metrics; +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; @@ -24,6 +32,8 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Reporter; import com.codahale.metrics.ScheduledReporter; +import com.codahale.metrics.Slf4jReporter; +import com.codahale.metrics.Slf4jReporter.LoggingLevel; import com.codahale.metrics.Timer; import com.codahale.metrics.jvm.BufferPoolMetricSet; import com.codahale.metrics.jvm.ClassLoadingGaugeSet; @@ -38,14 +48,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.management.ManagementFactory; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; 
-import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; - public class Metrics { private static final Logger LOGGER = LoggerFactory.getLogger(Metrics.class); @@ -230,6 +232,18 @@ private Metrics(Configuration conf) { reporters.add(reporter); scheduledReporters.add(reporter); hadoopMetricsStarted = true; + } else if (reporterName.startsWith("slf4j")) { + final String level = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.METRICS_SLF4J_LOG_LEVEL); + final Slf4jReporter reporter = Slf4jReporter.forRegistry(registry) + .outputTo(LOGGER) + .convertRatesTo(TimeUnit.SECONDS) + .convertDurationsTo(TimeUnit.MILLISECONDS) + .withLoggingLevel(LoggingLevel.valueOf(level)) + .build(); + reporter.start(MetastoreConf.getTimeVar(conf, + MetastoreConf.ConfVars.METRICS_SLF4J_LOG_FREQUENCY_MINS, TimeUnit.SECONDS), TimeUnit.SECONDS); + reporters.add(reporter); + scheduledReporters.add(reporter); } else { throw new RuntimeException("Unknown metric type " + reporterName); } diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java index 6f04288736..40a51751e3 100644 --- standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/metrics/TestMetrics.java @@ -17,22 +17,25 @@ */ package org.apache.hadoop.hive.metastore.metrics; +import java.io.File; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.List; +import java.util.concurrent.TimeUnit; + import com.codahale.metrics.Counter; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest; import 
org.apache.hadoop.hive.metastore.conf.MetastoreConf; +import org.apache.hadoop.hive.metastore.testutils.CapturingLogAppender; +import org.apache.logging.log4j.Level; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import java.io.File; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.concurrent.TimeUnit; - @Category(MetastoreUnitTest.class) public class TestMetrics { private static final long REPORT_INTERVAL = 1; @@ -42,6 +45,37 @@ public void shutdownMetrics() { Metrics.shutdown(); } + @Test + public void slf4jReporter() throws Exception { + Configuration conf = MetastoreConf.newMetastoreConf(); + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_REPORTERS, "slf4j"); + MetastoreConf.setTimeVar(conf, + MetastoreConf.ConfVars.METRICS_SLF4J_LOG_FREQUENCY_MINS, REPORT_INTERVAL, TimeUnit.SECONDS); + + // 1. Verify the default level (INFO) + validateSlf4jReporter(conf, Level.INFO); + + // 2. Verify an overridden level (DEBUG) + MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METRICS_SLF4J_LOG_LEVEL, "DEBUG"); + validateSlf4jReporter(conf, Level.DEBUG); + } + + private void validateSlf4jReporter(Configuration conf, Level level) throws InterruptedException { + Metrics.initialize(conf); + Counter counter = Metrics.getOrCreateCounter("my-counter"); + counter.inc(5); + // Make sure it has a chance to dump it. 
+ Thread.sleep(REPORT_INTERVAL * 1000 + REPORT_INTERVAL * 1000 / 2); + final List<String> capturedLogMessages = CapturingLogAppender.findLogMessagesContaining(level, "my-counter"); + Assert.assertTrue("Not a single counter message was logged from metrics when " + + "configured for SLF4J metric reporting at level " + level + "!", + capturedLogMessages.size() > 0); + final String logMessage = capturedLogMessages.get(0); + Assert.assertTrue("Counter value is incorrect on captured log message: \"" + logMessage + "\"", + logMessage.contains("count=5")); + Metrics.shutdown(); + } + @Test public void jsonReporter() throws Exception { File jsonReportFile = File.createTempFile("TestMetrics", ".json"); diff --git standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/testutils/CapturingLogAppender.java standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/testutils/CapturingLogAppender.java new file mode 100644 index 0000000000..891e13aac9 --- /dev/null +++ standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/testutils/CapturingLogAppender.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.testutils; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.message.Message; + +/** + * A log appender which captures the log events. This is useful for testing if log events + * are being logged correctly and at correct levels. 
+ */ +@Plugin(name = "CapturingLogAppender", category="Core", elementType="appender", printObject = true) +public class CapturingLogAppender extends AbstractAppender { + private static final List<LogEvent> EVENTS = new ArrayList<>(); + + private CapturingLogAppender(String name, Filter filter, Layout<? extends Serializable> layout) { + super(name, filter, layout); + } + + @Override + public void append(LogEvent logEvent) { + EVENTS.add(logEvent); + } + + public static List<String> findLogMessagesContaining(Level level, String substring) { + return EVENTS.stream() + .filter(event -> event.getLevel() == level) + .map(LogEvent::getMessage) + .map(Message::getFormattedMessage) + .filter(msg -> msg.contains(substring)) + .collect(Collectors.toList()); + } + + @PluginFactory + public static CapturingLogAppender createAppender(@PluginAttribute("name") String name) { + return new CapturingLogAppender(name, null, null); + } +} diff --git standalone-metastore/metastore-server/src/test/resources/log4j2.properties standalone-metastore/metastore-server/src/test/resources/log4j2.properties index 365687e1c9..7243144ed6 100644 --- standalone-metastore/metastore-server/src/test/resources/log4j2.properties +++ standalone-metastore/metastore-server/src/test/resources/log4j2.properties @@ -17,13 +17,16 @@ name=PropertiesConfig property.filename = logs -appenders = console +appenders = console,captured appender.console.type = Console appender.console.name = STDOUT appender.console.layout.type = PatternLayout appender.console.layout.pattern = [%-5level] %d{yyyy-MM-dd HH:mm:ss.SSS} [%t] %c{1} - %msg%n +appender.captured.type = CapturingLogAppender +appender.captured.name = CAPTURED + loggers=file logger.file.name=guru.springframework.blog.log4j2properties logger.file.level = debug @@ -31,5 +34,6 @@ logger.file.appenderRefs = file logger.file.appenderRef.file.ref = LOGFILE rootLogger.level = debug -rootLogger.appenderRefs = stdout +rootLogger.appenderRefs = stdout,captured rootLogger.appenderRef.stdout.ref = STDOUT 
+rootLogger.appenderRef.captured.ref = CAPTURED