diff --git a/common/src/main/resources/hive-log4j2.properties b/common/src/main/resources/hive-log4j2.properties
index b1719907571ead14686e7fcc14a0687158dec941..3312fd307c13d840799aec2796a5ec316375b8a8 100644
--- a/common/src/main/resources/hive-log4j2.properties
+++ b/common/src/main/resources/hive-log4j2.properties
@@ -20,12 +20,13 @@ packages = org.apache.hadoop.hive.ql.log
 # list of properties
 property.hive.log.level = INFO
+# Replace DRFA with the routing appender below to append <process-id>@<host-name> to the log file name if you want separate log files for different CLI sessions
 property.hive.root.logger = DRFA
 property.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}
 property.hive.log.file = hive.log
 property.hive.perflogger.log.level = INFO
 
-# list of all appenders
+# list of all appenders. Replace DRFA with routing to enable per-session log file separation
 appenders = console, DRFA
 
 # console appender
@@ -39,7 +40,6 @@ appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
 appender.DRFA.type = RollingRandomAccessFile
 appender.DRFA.name = DRFA
 appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
-# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session
 appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
 appender.DRFA.layout.type = PatternLayout
 appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
@@ -50,6 +50,39 @@ appender.DRFA.policies.time.modulate = true
 appender.DRFA.strategy.type = DefaultRolloverStrategy
 appender.DRFA.strategy.max = 30
 
+# PID based rolling file appender
+property.filename = ${sys:hive.log.dir}/${sys:hive.log.file}
+
+appender.routing.type = Routing
+appender.routing.name = routing
+appender.routing.routes.type = Routes
+appender.routing.routes.pattern = $${ctx:pid}
+appender.routing.routes.route1.type = Route
+appender.routing.routes.route1.rolling.type = RollingFile
+appender.routing.routes.route1.rolling.name = Routing-${ctx:pid}
+appender.routing.routes.route1.rolling.fileName = ${filename}.${ctx:pid}
+appender.routing.routes.route1.rolling.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.${ctx:pid}.%d{yyyy-MM-dd}
+appender.routing.routes.route1.rolling.layout.type = PatternLayout
+appender.routing.routes.route1.rolling.layout.pattern = %d{ISO8601} %5p [%t] %pid %c{2}: %m%n
+appender.routing.routes.route1.rolling.policies.type = Policies
+appender.routing.routes.route1.rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.routing.routes.route1.rolling.policies.time.interval = 1
+
+appender.routing.routes.route2.type = Route
+# This route is chosen if ThreadContext has no value for key pid
+appender.routing.routes.route2.key = $${ctx:pid}
+appender.routing.routes.route2.rolling.type = RollingFile
+appender.routing.routes.route2.rolling.name = Routing-default
+appender.routing.routes.route2.rolling.fileName = ${filename}-default
+appender.routing.routes.route2.rolling.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}-default.%d{yyyy-MM-dd}
+appender.routing.routes.route2.rolling.layout.type = PatternLayout
+appender.routing.routes.route2.rolling.layout.pattern = %d{ISO8601} %5p [%t] %pid %c{2}: %m%n
+appender.routing.routes.route2.rolling.policies.type = Policies
+appender.routing.routes.route2.rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.routing.routes.route2.rolling.policies.time.interval = 1
+
+
+
 # list of all loggers
 loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX, PerfLogger, AmazonAws, ApacheHttp
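For context on how the new routing appender picks a file: the Routes pattern is $${ctx:pid}, so the route is chosen per log event based on whether the logging thread's ThreadContext holds a value under the key pid. The sketch below (class name hypothetical, not part of this patch) illustrates the two cases, assuming the routing appender above has been enabled in place of DRFA.

import java.lang.management.ManagementFactory;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.ThreadContext;

// Hypothetical illustration only; not part of this patch.
public class RoutingPidSketch {
  private static final Logger LOG = LogManager.getLogger(RoutingPidSketch.class);

  public static void main(String[] args) {
    // No "pid" in the ThreadContext yet: the routes pattern evaluates to the
    // literal ${ctx:pid}, which matches route2's key, so this event should land
    // in ${filename}-default (e.g. hive.log-default).
    LOG.info("expected in the -default file");

    // Seed the ThreadContext the same way ProcessIdPatternConverter does
    // (<process-id>@<host-name>); route1 (the keyless default route) is then
    // selected and events go to ${filename}.<pid>, e.g. hive.log.95232@localhost.
    ThreadContext.put("pid", ManagementFactory.getRuntimeMXBean().getName());
    LOG.info("expected in the per-session file");
  }
}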
diff --git a/llap-server/src/main/resources/llap-cli-log4j2.properties b/llap-server/src/main/resources/llap-cli-log4j2.properties
index 687c97399a6632205318c24665ed9a97dc30c552..e2c00ae0e6aae7fac1532bf710adb5d8d1fbf0b7 100644
--- a/llap-server/src/main/resources/llap-cli-log4j2.properties
+++ b/llap-server/src/main/resources/llap-cli-log4j2.properties
@@ -46,7 +46,6 @@ appender.llapstatusconsole.layout.pattern = %m%n
 appender.DRFA.type = RollingRandomAccessFile
 appender.DRFA.name = DRFA
 appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
-# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session
 appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
 appender.DRFA.layout.type = PatternLayout
 appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
deleted file mode 100644
index c49f53fd3cdcf71ebca36dec185021e18d0b1ef9..0000000000000000000000000000000000000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.log;
-
-import java.lang.management.ManagementFactory;
-
-import org.apache.logging.log4j.core.config.plugins.Plugin;
-import org.apache.logging.log4j.core.config.plugins.PluginFactory;
-import org.apache.logging.log4j.core.pattern.AbstractPatternConverter;
-import org.apache.logging.log4j.core.pattern.ArrayPatternConverter;
-import org.apache.logging.log4j.core.pattern.ConverterKeys;
-
-/**
- * FilePattern converter that converts the %pid pattern to <process-id>@<host-name> information
- * obtained at runtime.
- *
- * Example usage:
- *
- *
- * Will generate an output file with a name containing <process-id>@<host-name> like below
- * test.log.95232@localhost.gz
- */
-@Plugin(name = "PidFilePatternConverter", category = "FileConverter")
-@ConverterKeys({ "pid" })
-public class PidFilePatternConverter extends AbstractPatternConverter implements
-    ArrayPatternConverter {
-
-  /**
-   * Private constructor.
-   */
-  private PidFilePatternConverter() {
-    super("pid", "pid");
-  }
-
-  @PluginFactory
-  public static PidFilePatternConverter newInstance() {
-    return new PidFilePatternConverter();
-  }
-
-  public void format(StringBuilder toAppendTo, Object... objects) {
-    toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName());
-  }
-
-  public void format(Object obj, StringBuilder toAppendTo) {
-    toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName());
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/ProcessIdPatternConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/ProcessIdPatternConverter.java
new file mode 100644
index 0000000000000000000000000000000000000000..92486d82d51b583b1e6ac33e1a783ad20fc3739e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/ProcessIdPatternConverter.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import org.apache.logging.log4j.ThreadContext;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.pattern.ConverterKeys;
+import org.apache.logging.log4j.core.pattern.LogEventPatternConverter;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+
+/**
+ * LogEvent converter that converts the %pid pattern to <process-id>@<host-name> information
+ * obtained at runtime, and publishes it under the ThreadContext key "pid" so that the
+ * ${ctx:pid} lookup can resolve it.
+ *
+ * Example usage:
+ *
+ *
+ * Will generate an output file with a name containing <process-id>@<host-name> like below
+ * test.log.95232@localhost.gz
+ */
+@Plugin(name = "ProcessIdPatternConverter", category = "Converter")
+@ConverterKeys({ "pid", "processId" })
+public final class ProcessIdPatternConverter extends LogEventPatternConverter {
+  private final String pid;
+
+  private ProcessIdPatternConverter(String[] options) {
+    super("Process ID", "pid");
+    String temp = options.length > 0 ? options[0] : "???";
+    try {
+      // likely works on most platforms
+      temp = ManagementFactory.getRuntimeMXBean().getName();
+    } catch (final Exception ex) {
+      try {
+        // try a Linux-specific way
+        temp = new File("/proc/self").getCanonicalFile().getName();
+      } catch (final IOException ignored) {}
+    }
+    pid = temp;
+    ThreadContext.put("pid", pid);
+  }
+
+  /**
+   * Obtains an instance of ProcessIdPatternConverter.
+   *
+   * @param options users may specify a default like {@code %pid{NOPID} }
+   * @return instance of ProcessIdPatternConverter.
+   */
+  public static ProcessIdPatternConverter newInstance(final String[] options) {
+    return new ProcessIdPatternConverter(options);
+  }
+
+  /**
+   * Returns the process ID.
+   * @return the process ID
+   */
+  public String getProcessId() {
+    return pid;
+  }
+
+  @Override
+  public void format(final LogEvent event, final StringBuilder toAppendTo) {
+    toAppendTo.append(pid);
+  }
+}
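The converter's constructor is what makes the ${ctx:pid} lookup in the routing appender work: besides rendering %pid / %processId in layout patterns, it seeds the ThreadContext of the calling thread with the discovered process id. A minimal sketch of exercising it directly (class name hypothetical, not part of this patch):

import org.apache.hadoop.hive.ql.log.ProcessIdPatternConverter;
import org.apache.logging.log4j.ThreadContext;

// Hypothetical illustration only; not part of this patch.
public class ProcessIdConverterSketch {
  public static void main(String[] args) {
    // "???" is the fallback kept only if both the runtime MXBean name and
    // /proc/self are unavailable.
    ProcessIdPatternConverter converter =
        ProcessIdPatternConverter.newInstance(new String[] {"???"});

    // Typically prints something like 95232@localhost.
    System.out.println("pid = " + converter.getProcessId());

    // Constructing the converter also seeded the ThreadContext, so the routing
    // appender's ${ctx:pid} lookup now resolves for this thread.
    System.out.println("ctx pid = " + ThreadContext.get("pid"));
  }
}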
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/log/ProcessIdPatternConverterTest.java b/ql/src/test/org/apache/hadoop/hive/ql/log/ProcessIdPatternConverterTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..5ceb5f7f5832b140bbffc5061947f6c1ba896595
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/log/ProcessIdPatternConverterTest.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import org.apache.logging.log4j.core.pattern.ProcessIdPatternConverter;
+import org.junit.Test;
+
+import static org.junit.Assert.assertNotEquals;
+
+/**
+ * Test for ProcessIdPatternConverter.
+ */
+public class ProcessIdPatternConverterTest {
+
+  @Test
+  public void getProcessId() throws Exception {
+    final String[] defaultValue = {"???"};
+    final String actual = ProcessIdPatternConverter.newInstance(defaultValue).getProcessId();
+    assertNotEquals("???", actual);
+  }
+}
diff --git a/standalone-metastore/src/main/resources/metastore-log4j2.properties b/standalone-metastore/src/main/resources/metastore-log4j2.properties
index ec5039b4fbdf880fee2a3f86ec3402ba02f3d21d..a77b77c8ae1921e25912a47cc4fec06315cf4332 100644
--- a/standalone-metastore/src/main/resources/metastore-log4j2.properties
+++ b/standalone-metastore/src/main/resources/metastore-log4j2.properties
@@ -39,7 +39,6 @@ appender.console.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
 appender.DRFA.type = RollingRandomAccessFile
 appender.DRFA.name = DRFA
 appender.DRFA.fileName = ${sys:metastore.log.dir}/${sys:metastore.log.file}
-# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session
 appender.DRFA.filePattern = ${sys:metastore.log.dir}/${sys:metastore.log.file}.%d{yyyy-MM-dd}
 appender.DRFA.layout.type = PatternLayout
 appender.DRFA.layout.pattern = %d{ISO8601} %5p [%t] %c{2}: %m%n
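Because the hive-log4j2.properties values are defined as overridable properties, the routing appender can in principle be selected without editing the file, by setting the hive.root.logger system property before Log4j 2 initializes. This is only a sketch of that idea and assumes the root logger's appender ref (not shown in this diff) is resolved via ${sys:hive.root.logger}; the class name is hypothetical.

// Hypothetical launcher sketch; not part of this patch.
public class RoutingEnableSketch {
  public static void main(String[] args) {
    // Must happen before Log4j 2 reads hive-log4j2.properties; equivalent to
    // passing -Dhive.root.logger=routing on the JVM command line.
    System.setProperty("hive.root.logger", "routing");

    org.apache.logging.log4j.Logger log =
        org.apache.logging.log4j.LogManager.getLogger(RoutingEnableSketch.class);
    log.info("handled by the routing appender instead of DRFA, if the override applies");
  }
}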