diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml index ee40f7281427bffa289504b1421398e26d0878ab..4e3a0879dc1b6390981d0c6ff77c84c1f7f6d8a4 100644 --- a/accumulo-handler/pom.xml +++ b/accumulo-handler/pom.xml @@ -91,10 +91,6 @@ slf4j-api - org.slf4j - slf4j-log4j12 - - junit junit test diff --git a/beeline/src/main/resources/beeline-log4j.properties b/beeline/src/main/resources/beeline-log4j.properties deleted file mode 100644 index fe47d948281b929851feab3ca63835763aba344c..0000000000000000000000000000000000000000 --- a/beeline/src/main/resources/beeline-log4j.properties +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=WARN, console - -######## console appender ######## -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 diff --git a/beeline/src/main/resources/beeline-log4j2.xml b/beeline/src/main/resources/beeline-log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..38935b29f0ec2c32f018819880829bc0629d0c68 --- /dev/null +++ b/beeline/src/main/resources/beeline-log4j2.xml @@ -0,0 +1,40 @@ + + + + + + + ALL + WARN + console + + + + + + + + + + + + + + + diff --git a/bin/ext/beeline.sh b/bin/ext/beeline.sh index ab3dc1aea2b7a218522841753baef890cb637f8f..9de8f6ca2e51f02e7061924ed1d9a5710b4ff8d3 100644 --- a/bin/ext/beeline.sh +++ b/bin/ext/beeline.sh @@ -31,7 +31,7 @@ beeline () { hadoopClasspath="${HADOOP_CLASSPATH}:" fi export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}" - export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties " + export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=beeline-log4j2.xml " exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@" } diff --git a/bin/hive b/bin/hive index 5dc93fb3a61734675ade5ab662040d6644a9f3e8..3b38617cab1226bf6bc79ff54f6f9fb32146a6fc 100755 --- a/bin/hive +++ b/bin/hive @@ -193,6 +193,9 @@ if [ "$HADOOP_HOME" == "" ]; then exit 4; fi +# to avoid errors from log4j2 automatic configuration loading +export HADOOP_CLIENT_OPTS=" -Dlog4j.configurationFile=hive-log4j2.xml " + HADOOP=$HADOOP_HOME/bin/hadoop if [ ! 
-f ${HADOOP} ]; then echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path"; diff --git a/common/pom.xml b/common/pom.xml index aedf7ba11e4952b90e57d5d70c8fc210d453b285..88ac28c988d029231a5878f68d0d3e64dca96072 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -61,14 +61,24 @@ ${joda.version} - log4j - log4j - ${log4j.version} + org.apache.logging.log4j + log4j-1.2-api + ${log4j2.version} - log4j - apache-log4j-extras - ${log4j-extras.version} + org.apache.logging.log4j + log4j-web + ${log4j2.version} + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + + + org.apache.logging.log4j + log4j-jcl + ${log4j2.version} org.apache.commons diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 91186755c986359b0a2abf7b5c5faaa67622d7c3..3ca5c0f7cb59a64f304af5ac0417ade4f4a967c7 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -18,26 +18,22 @@ package org.apache.hadoop.hive.common; -import java.net.URL; import java.io.File; -import java.io.IOException; -import java.io.FileNotFoundException; +import java.net.URL; -import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.log4j.LogManager; -import org.apache.log4j.PropertyConfigurator; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.logging.log4j.core.config.Configurator; /** * Utilities common to logging operations. */ public class LogUtils { - private static final String HIVE_L4J = "hive-log4j.properties"; - private static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties"; + private static final String HIVE_L4J = "hive-log4j2.xml"; + private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml"; private static final Log l4j = LogFactory.getLog(LogUtils.class); @SuppressWarnings("serial") @@ -95,8 +91,7 @@ private static String initHiveLog4jCommon(ConfVars confVarName) } System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId); } - LogManager.resetConfiguration(); - PropertyConfigurator.configure(log4jFileName); + Configurator.initialize(null, log4jFileName); logConfigLocation(conf); return ("Logging initialized using configuration in " + log4jConfigFile); } @@ -123,8 +118,7 @@ private static String initHiveLog4jDefault( break; } if (hive_l4j != null) { - LogManager.resetConfiguration(); - PropertyConfigurator.configure(hive_l4j); + Configurator.initialize(null, hive_l4j.toString()); logConfigLocation(conf); return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j); } else { diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 33b67dd7b0fde41f81f8d86ea8c83d29c631e3d7..a2032cdf69dc68ae378bb9d9b836bcc60108c1fa 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1696,13 +1696,13 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) { // logging configuration HIVE_LOG4J_FILE("hive.log4j.file", "", "Hive log4j configuration file.\n" + - "If the property is not set, then logging will be initialized using hive-log4j.properties found on the classpath.\n" + - "If the property is set, the value must be a valid URI 
(java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" + + "If the property is not set, then logging will be initialized using hive-log4j2.xml found on the classpath.\n" + + "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" + "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."), HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", "", "Hive log4j configuration file for execution mode(sub command).\n" + - "If the property is not set, then logging will be initialized using hive-exec-log4j.properties found on the classpath.\n" + - "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" + + "If the property is not set, then logging will be initialized using hive-exec-log4j2.xml found on the classpath.\n" + + "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" + "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."), HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false, diff --git a/common/src/main/resources/hive-log4j.properties b/common/src/main/resources/hive-log4j.properties deleted file mode 100644 index 14fa725de743e233af76345f3d54f2b9ab390c7a..0000000000000000000000000000000000000000 --- a/common/src/main/resources/hive-log4j.properties +++ /dev/null @@ -1,88 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.log.threshold=ALL -hive.root.logger=INFO,DRFA -hive.log.dir=${java.io.tmpdir}/${user.name} -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshold=${hive.log.threshold} - -# -# Daily Rolling File Appender -# -# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files -# for different CLI session. 
-# -# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA diff --git a/common/src/main/resources/hive-log4j2.xml b/common/src/main/resources/hive-log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..fc41012e95c333a4911016009a901ba84cf6ef64 --- /dev/null +++ b/common/src/main/resources/hive-log4j2.xml @@ -0,0 +1,111 @@ + + + + + + + ALL + INFO + DRFA + ${sys:java.io.tmpdir}/${sys:user.name} + hive.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java index d5cedb17b24a6a3cb9f683383b8cbb61ce88eef2..92269e76fe8cada98d2bec61cab0d21ed06a0ed8 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java @@ -21,12 +21,12 @@ import java.io.File; import java.io.InputStreamReader; -import junit.framework.TestCase; - import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.common.util.HiveTestUtils; +import junit.framework.TestCase; + /** * TestHiveLogging * @@ -104,9 +104,9 @@ public void testHiveLogging() throws Exception { // customized log4j config log file to be: /${test.tmp.dir}/TestHiveLogging/hiveLog4jTest.log File customLogPath = new File(new File(System.getProperty("test.tmp.dir")), System.getProperty("user.name") + "-TestHiveLogging/"); - String customLogName = "hiveLog4jTest.log"; + String customLogName = "hiveLog4j2Test.log"; File customLogFile = new File(customLogPath, customLogName); RunTest(customLogFile, - "hive-log4j-test.properties", 
"hive-exec-log4j-test.properties"); + "hive-log4j2-test.xml", "hive-exec-log4j2-test.xml"); } } diff --git a/common/src/test/resources/hive-exec-log4j-test.properties b/common/src/test/resources/hive-exec-log4j-test.properties deleted file mode 100644 index 1e53f265f4a1782baf6c347ce99aabcb90502109..0000000000000000000000000000000000000000 --- a/common/src/test/resources/hive-exec-log4j-test.properties +++ /dev/null @@ -1,59 +0,0 @@ -# Define some default values that can be overridden by system properties -hive.root.logger=INFO,FA -hive.log.dir=/${test.tmp.dir}/${user.name}-TestHiveLogging -hive.log.file=hiveExecLog4jTest.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# File Appender -# - -log4j.appender.FA=org.apache.log4j.FileAppender -log4j.appender.FA.File=${hive.log.dir}/${hive.log.file} -log4j.appender.FA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter - - -log4j.category.DataNucleus=ERROR,FA -log4j.category.Datastore=ERROR,FA -log4j.category.Datastore.Schema=ERROR,FA -log4j.category.JPOX.Datastore=ERROR,FA -log4j.category.JPOX.Plugin=ERROR,FA -log4j.category.JPOX.MetaData=ERROR,FA -log4j.category.JPOX.Query=ERROR,FA -log4j.category.JPOX.General=ERROR,FA -log4j.category.JPOX.Enhancer=ERROR,FA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA diff --git a/common/src/test/resources/hive-exec-log4j2-test.xml b/common/src/test/resources/hive-exec-log4j2-test.xml new file mode 100644 index 0000000000000000000000000000000000000000..5c0a0cf455b6554734b13bc7323e99ca0ab0a7b1 --- /dev/null +++ b/common/src/test/resources/hive-exec-log4j2-test.xml @@ -0,0 +1,86 @@ + + + + + + + ALL + INFO + FA + ${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging + hiveExecLog4j2Test.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/common/src/test/resources/hive-log4j-test.properties b/common/src/test/resources/hive-log4j-test.properties deleted file mode 100644 index 03483254667c1ffa10c2e1711bb572c9f0d95c16..0000000000000000000000000000000000000000 --- a/common/src/test/resources/hive-log4j-test.properties +++ /dev/null @@ -1,71 +0,0 @@ -# Define some default values that can be overridden by system properties -hive.root.logger=WARN,DRFA -hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging -hive.log.file=hiveLog4jTest.log - -# Define the root logger to the system property "hadoop.root.logger". 
-log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# Daily Rolling File Appender -# -# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files -# for different CLI session. -# -# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA diff --git a/common/src/test/resources/hive-log4j2-test.xml b/common/src/test/resources/hive-log4j2-test.xml new file mode 100644 index 0000000000000000000000000000000000000000..8295a8fc18e3a910cc6d6eaa67a3ba0882c71ff5 --- /dev/null +++ b/common/src/test/resources/hive-log4j2-test.xml @@ -0,0 +1,95 @@ + + + + + + + ALL + WARN + DRFA + ${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging + hiveLog4j2Test.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/data/conf/hive-log4j-old.properties b/data/conf/hive-log4j-old.properties deleted file mode 100644 index f274b8c9425258a84f29c9c6120fca71b872ab2b..0000000000000000000000000000000000000000 --- a/data/conf/hive-log4j-old.properties +++ /dev/null @@ -1,82 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.root.logger=DEBUG,DRFA -hive.log.dir=${build.dir.hive}/ql/tmp/ -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# Daily Rolling File Appender -# - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA -log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA diff --git a/data/conf/hive-log4j.properties b/data/conf/hive-log4j.properties deleted file mode 100644 index 023e3c2ac647715b9bae512ee5f796e56bd726b2..0000000000000000000000000000000000000000 --- a/data/conf/hive-log4j.properties +++ /dev/null @@ -1,97 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.root.logger=DEBUG,DRFA -hive.log.dir=${test.tmp.dir}/log/ -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". 
-log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# Daily Rolling File Appender -# - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - -# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job! -# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy. -# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html -# Add "DAILY" to hive.root.logger above if you want to use this. -log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender -log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy -log4j.appender.DAILY.rollingPolicy.ActiveFileName=${hive.log.dir}/${hive.log.file} -log4j.appender.DAILY.rollingPolicy.FileNamePattern=${hive.log.dir}/${hive.log.file}.%d{yyyy-MM-dd} -log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout -log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. 
-# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA -log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA -log4j.logger.org.apache.zookeeper=INFO,DRFA -log4j.logger.org.apache.zookeeper.server.ServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocket=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA -log4j.logger.org.apache.hadoop.hive.ql.log.PerfLogger=${hive.ql.log.PerfLogger.level} -log4j.logger.org.apache.hadoop.hive.ql.exec.Operator=INFO,DRFA -log4j.logger.org.apache.hadoop.hive.serde2.lazy=INFO,DRFA -log4j.logger.org.apache.hadoop.hive.metastore.ObjectStore=INFO,DRFA diff --git a/data/conf/hive-log4j2.xml b/data/conf/hive-log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..a8924b3429622d268577b6f2946821050b941d08 --- /dev/null +++ b/data/conf/hive-log4j2.xml @@ -0,0 +1,148 @@ + + + + + + + ALL + DEBUG + DRFA + ${sys:test.tmp.dir}/log + hive.log + INFO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/data/conf/spark/log4j.properties b/data/conf/spark/log4j.properties deleted file mode 100644 index 8838c0efd04baa1ec5c7c13a5da28f4c4e766f31..0000000000000000000000000000000000000000 --- a/data/conf/spark/log4j.properties +++ /dev/null @@ -1,24 +0,0 @@ -log4j.rootCategory=DEBUG, DRFA - -hive.spark.log.dir=target/tmp/log -# Settings to quiet third party logs that are too verbose -log4j.logger.org.eclipse.jetty=WARN -log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR -log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO -log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.spark.log.dir}/spark.log - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n diff --git a/data/conf/spark/log4j2.xml b/data/conf/spark/log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..d5293568841605a50ecbc1666d6fd59e8857ccf5 --- /dev/null +++ b/data/conf/spark/log4j2.xml @@ -0,0 +1,74 @@ + + + + + + + DEBUG + DRFA + target/tmp/log + spark.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/xdocs/language_manual/cli.xml b/docs/xdocs/language_manual/cli.xml index a2933827c7f9079257f197c8d63ffab0d881be10..eb91e44577d1d4df27d2f00da10b87766e4f171a 100644 --- a/docs/xdocs/language_manual/cli.xml +++ b/docs/xdocs/language_manual/cli.xml @@ -163,7 +163,7 @@ 
Sample Usage:

-Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use hive-log4j.default in the conf/ directory of the hive installation which writes out logs to /tmp/$USER/hive.log and uses the WARN level. +Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use hive-log4j2.xml in the conf/ directory of the hive installation which writes out logs to /tmp/$USER/hive.log and uses the WARN level.

It is often desirable to emit the logs to the standard output and/or change the logging level for debugging purposes. These can be done from the command line as follows:

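For example, to emit logs to the console at INFO for a single session, the root logger can be overridden on the command line (a typical invocation, using the same hive.root.logger property the bundled configurations define):

    bin/hive --hiveconf hive.root.logger=INFO,console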
diff --git a/hcatalog/bin/hcat_server.sh b/hcatalog/bin/hcat_server.sh index 6b09d3e93fd38d4171a70087539d7dca6099983c..d1aecb8d4f99f369c5697826878d1d9c3b073b54 100644 --- a/hcatalog/bin/hcat_server.sh +++ b/hcatalog/bin/hcat_server.sh @@ -84,7 +84,7 @@ function start_hcat() { export AUX_CLASSPATH=${AUX_CLASSPATH} export HADOOP_HOME=$HADOOP_HOME - #export HADOOP_OPTS="-Dlog4j.configuration=file://${HCAT_PREFIX}/conf/log4j.properties" + #export HADOOP_OPTS="-Dlog4j.configurationFile=file://${HCAT_PREFIX}/conf/log4j2.xml" export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${HCAT_LOG_DIR}/hcat_err_pid%p.log -Xloggc:${HCAT_LOG_DIR}/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps" export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-2048} # 8G is better if you have it export METASTORE_PORT=${METASTORE_PORT:-9083} diff --git a/hcatalog/bin/templeton.cmd b/hcatalog/bin/templeton.cmd index e9a735d5bd0063ae0bdc89636409c15d1212e882..759f654f085fbf554cb202225294df123842b4cf 100644 --- a/hcatalog/bin/templeton.cmd +++ b/hcatalog/bin/templeton.cmd @@ -59,9 +59,9 @@ setlocal enabledelayedexpansion if not defined TEMPLETON_LOG4J ( @rem must be prefixed with file: otherwise config is not picked up - set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j.properties + set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j2.xml ) - set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configuration=%TEMPLETON_LOG4J% %HADOOP_OPTS% + set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configurationFile=%TEMPLETON_LOG4J% %HADOOP_OPTS% set arguments=%JAVA_HEAP_MAX% %TEMPLETON_OPTS% -classpath %CLASSPATH% org.apache.hive.hcatalog.templeton.Main if defined service_entry ( diff --git a/hcatalog/scripts/hcat_server_start.sh b/hcatalog/scripts/hcat_server_start.sh index 1670b70ffe66c10be59e71d423b342384360f94d..872d1b562fb106318c010e778bdf70f22a74bb83 100755 --- a/hcatalog/scripts/hcat_server_start.sh +++ b/hcatalog/scripts/hcat_server_start.sh @@ -70,7 +70,7 @@ export AUX_CLASSPATH=${AUX_CLASSPATH} export HADOOP_HOME=$HADOOP_HOME -#export HADOOP_OPTS="-Dlog4j.configuration=file://${ROOT}/conf/log4j.properties" +#export HADOOP_OPTS="-Dlog4j.configurationFile=file://${ROOT}/conf/log4j2.xml" export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${ROOT}/var/log/hcat_err_pid%p.log -Xloggc:${ROOT}/var/log/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps" export HADOOP_HEAPSIZE=2048 # 8G is better if you have it diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml index 9757b9c4b4c2ac0f942437b1009ff138fb9d6185..6385e40950065cace78540d100197b2b5a08c708 100644 --- a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml +++ b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml @@ -66,7 +66,7 @@ ${env.PIG_HOME}/bin/pig uncertainty.

Note: The location of the log files created by Templeton and some other properties - of the logging system are set in the webhcat-log4j.properties file.

+ of the logging system are set in the webhcat-log4j2.xml file.

Variables diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml index 16da2486851c7d15e566629cb3492717d65a27f4..e2953a97f4bb51031ff20f51ebb6af7eda58c550 100644 --- a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml +++ b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml @@ -241,7 +241,7 @@

Server activity logs are located in root/var/log/hcat_server. Logging configuration is located at - root/conf/log4j.properties. Server logging uses + root/conf/log4j2.xml. Server logging uses DailyRollingFileAppender by default. It will generate a new file per day and does not expire old log files automatically.

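In log4j2, the daily rollover that DailyRollingFileAppender provided is expressed as a RollingFile appender with a time-based triggering policy. A minimal sketch of such a configuration, reusing the DRFA appender name, the default INFO level, and the layout pattern visible in the configurations above (an approximation, not the verbatim contents of the new files in this patch):

    <Configuration status="WARN">
      <Properties>
        <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
        <Property name="hive.log.file">hive.log</Property>
      </Properties>
      <Appenders>
        <!-- Equivalent of the old log4j1 DailyRollingFileAppender: rolls to a new file each day -->
        <RollingFile name="DRFA" fileName="${hive.log.dir}/${hive.log.file}"
                     filePattern="${hive.log.dir}/${hive.log.file}.%d{yyyy-MM-dd}">
          <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n"/>
          <Policies>
            <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
          </Policies>
        </RollingFile>
      </Appenders>
      <Loggers>
        <Root level="INFO">
          <AppenderRef ref="DRFA"/>
        </Root>
      </Loggers>
    </Configuration>

As with the old DailyRollingFileAppender defaults, no rollover strategy caps the backup count, so old daily files accumulate until cleaned up externally.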
diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties deleted file mode 100644 index 82684b306146835481709a891dfe1034231401cb..0000000000000000000000000000000000000000 --- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties +++ /dev/null @@ -1,88 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.log.threshold=ALL -hive.root.logger=DEBUG,DRFA -hive.log.dir=/tmp/ekoifman -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshold=${hive.log.threshold} - -# -# Daily Rolling File Appender -# -# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files -# for different CLI session. -# -# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. 
-# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..e2e02d1e0fc95f00c969751dd10aadb152b621fa --- /dev/null +++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml @@ -0,0 +1,111 @@ + + + + + + + ALL + DEBUG + DRFA + ${sys:java.io.tmpdir}/${sys:user.name} + hive.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh index 8cc93539fbcd2f665ae754ccf6ee6aa3fe3d8ad3..e59177c2b80397d968931a3d0d8c316e84392e93 100755 --- a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh +++ b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh @@ -31,7 +31,7 @@ cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site. #cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml ${HIVE_HOME}/conf/hive-site.xml cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml -cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties ${HIVE_HOME}/conf/hive-log4j.properties +cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml ${HIVE_HOME}/conf/hive-log4j2.xml if [ -f ${MYSQL_CLIENT_JAR} ]; then cp ${MYSQL_CLIENT_JAR} ${HIVE_HOME}/lib diff --git a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh index 0be8dde22ebc3710e38e1c35013e9095dca4aefb..c80fdd5d1225a8547e568045ab39fb4470b4c342 100644 --- a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh +++ b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh @@ -215,11 +215,11 @@ else fi if [[ -z "$WEBHCAT_LOG4J" ]]; then - WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j.properties"; + WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j2.xml"; fi export HADOOP_USER_CLASSPATH_FIRST=true -export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configuration=$WEBHCAT_LOG4J" +export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configurationFile=$WEBHCAT_LOG4J" start_cmd="$HADOOP_PREFIX/bin/hadoop jar $JAR org.apache.hive.hcatalog.templeton.Main " diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties deleted file mode 100644 index 866052cc1df50e0cb5325fceca2f8b270891215f..0000000000000000000000000000000000000000 --- a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed to the Apache 
Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -webhcat.root.logger = INFO, standard -webhcat.log.dir = . -webhcat.log.file = webhcat.log - -log4j.rootLogger = ${webhcat.root.logger} - -# Logging Threshold -log4j.threshhold = DEBUG - -log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender -log4j.appender.standard.File = ${webhcat.log.dir}/${webhcat.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern = .yyyy-MM-dd - -log4j.appender.DRFA.layout = org.apache.log4j.PatternLayout - -log4j.appender.standard.layout = org.apache.log4j.PatternLayout -log4j.appender.standard.layout.conversionPattern = %-5p | %d{DATE} | %c | %m%n - -# Class logging settings -log4j.logger.com.sun.jersey = DEBUG -log4j.logger.com.sun.jersey.spi.container.servlet.WebComponent = ERROR -log4j.logger.org.apache.hadoop = INFO -log4j.logger.org.apache.hadoop.conf = WARN -log4j.logger.org.apache.zookeeper = WARN -log4j.logger.org.eclipse.jetty = INFO diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..0247ac51504826efe2212eeda6f7ca521bd28efb --- /dev/null +++ b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml @@ -0,0 +1,74 @@ + + + + + + + INFO + standard + . + webhcat.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java new file mode 100644 index 0000000000000000000000000000000000000000..b93c14dca6f2dec89f2c5f98de6ebb238e9bdb79 --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java @@ -0,0 +1,138 @@ +package org.apache.hive.service.cli.operation; + +import java.io.File; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hive.jdbc.miniHS2.MiniHS2; +import org.apache.hive.service.cli.CLIServiceClient; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.FetchType; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.SessionHandle; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * OperationLoggingAPITestBase + * Test the FetchResults of TFetchType.LOG in thrift level. + * This is the base class. 
+ */ +public class TestOperationLoggingLayout { + protected static HiveConf hiveConf; + protected static String tableName; + private File dataFile; + protected CLIServiceClient client; + protected static MiniHS2 miniHS2 = null; + protected static Map<String, String> confOverlay; + protected SessionHandle sessionHandle; + protected final String sql = "select * from " + tableName; + private final String sqlCntStar = "select count(*) from " + tableName; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + tableName = "testOperationLoggingAPIWithMr_table"; + hiveConf = new HiveConf(); + hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution"); + // We need to set the below parameter to test performance level logging + hiveConf.set("hive.ql.log.PerfLogger.level", "INFO,DRFA"); + miniHS2 = new MiniHS2(hiveConf); + confOverlay = new HashMap<String, String>(); + confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + miniHS2.start(confOverlay); + } + + /** + * Open a session, and create a table for the test cases to use + * + * @throws Exception + */ + @Before + public void setUp() throws Exception { + dataFile = new File(hiveConf.get("test.data.files"), "kv1.txt"); + client = miniHS2.getServiceClient(); + sessionHandle = setupSession(); + } + + @After + public void tearDown() throws Exception { + // Cleanup + String queryString = "DROP TABLE " + tableName; + client.executeStatement(sessionHandle, queryString, null); + + client.closeSession(sessionHandle); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + miniHS2.stop(); + } + + @Test + public void testSwitchLogLayout() throws Exception { + // verify whether the sql operation log is generated and fetched correctly. + OperationHandle operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null); + RowSet rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 1000, + FetchType.LOG); + Iterator<Object[]> iter = rowSetLog.iterator(); + // non-verbose pattern is %-5p : %m%n.
Look for " : " + while (iter.hasNext()) { + String row = iter.next()[0].toString(); + Assert.assertEquals(true, row.matches("^(FATAL|ERROR|WARN|INFO|DEBUG|TRACE).*$")); + } + + String queryString = "set hive.server2.logging.operation.level=verbose"; + client.executeStatement(sessionHandle, queryString, null); + operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null); + // just check for first few lines, some log lines are multi-line strings which can break format + // checks below + rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 10, + FetchType.LOG); + iter = rowSetLog.iterator(); + // verbose pattern is "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n" + while (iter.hasNext()) { + String row = iter.next()[0].toString(); + // just check if the log line starts with date + Assert.assertEquals(true, + row.matches("^\\d{2}[/](0[1-9]|1[012])[/](0[1-9]|[12][0-9]|3[01]).*$")); + } + } + + private SessionHandle setupSession() throws Exception { + // Open a session + SessionHandle sessionHandle = client.openSession(null, null, null); + + // Change lock manager to embedded mode + String queryString = "SET hive.lock.manager=" + + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; + client.executeStatement(sessionHandle, queryString, null); + + // Drop the table if it exists + queryString = "DROP TABLE IF EXISTS " + tableName; + client.executeStatement(sessionHandle, queryString, null); + + // Create a test table + queryString = "create table " + tableName + " (key int, value string)"; + client.executeStatement(sessionHandle, queryString, null); + + // Load data + queryString = "load data local inpath '" + dataFile + "' into table " + tableName; + client.executeStatement(sessionHandle, queryString, null); + + // Precondition check: verify whether the table is created and data is fetched correctly. 
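+ // kv1.txt holds 500 rows and its first row is (238, "val_238"); the assertions below depend on that.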
+ OperationHandle operationHandle = client.executeStatement(sessionHandle, sql, null); + RowSet rowSetResult = client.fetchResults(operationHandle); + Assert.assertEquals(500, rowSetResult.numRows()); + Assert.assertEquals(238, rowSetResult.iterator().next()[0]); + Assert.assertEquals("val_238", rowSetResult.iterator().next()[1]); + + return sessionHandle; + } +} diff --git a/itests/pom.xml b/itests/pom.xml index f156cc47e2d046bca3200e931f4ac47928e52f85..acce7131948edd5aeab34af6879d781daa12ba30 100644 --- a/itests/pom.xml +++ b/itests/pom.xml @@ -94,7 +94,7 @@ } mkdir -p $DOWNLOAD_DIR download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz" "spark" - cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/ + cp -f $HIVE_ROOT/data/conf/spark/log4j2.xml $BASE_DIR/spark/conf/ sed '/package /d' ${basedir}/${hive.path.to.root}/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java > /tmp/UDFExampleAdd.java javac -cp ${settings.localRepository}/org/apache/hive/hive-exec/${project.version}/hive-exec-${project.version}.jar /tmp/UDFExampleAdd.java -d /tmp jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index ee2cea05fb3b4c9582b7b5ac9d5cd929d3b9ec1a..5c6049192879fffadc1aa0c6e951f806da878a24 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -5913,7 +5913,7 @@ public static void main(String[] args) throws Throwable { // If the log4j.configuration property hasn't already been explicitly set, // use Hive's default log4j configuration - if (System.getProperty("log4j.configuration") == null) { + if (System.getProperty("log4j.configurationFile") == null) { // NOTE: It is critical to do this here so that log4j is reinitialized // before any of the other core hive classes are loaded try { diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java index ad994276b2dfd201ca32ef84131c0ebf8d0f8e0d..df42f1a55b327cfccada74b8c85dbd4d62e25bf9 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java @@ -17,15 +17,11 @@ */ package org.apache.hadoop.hive.metastore.txn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.*; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import static junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertNotNull; +import static junit.framework.Assert.assertNull; +import static junit.framework.Assert.assertTrue; +import static junit.framework.Assert.fail; import java.util.ArrayList; import java.util.Arrays; @@ -34,7 +30,29 @@ import java.util.SortedSet; import java.util.TreeSet; -import static junit.framework.Assert.*; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.AbortTxnRequest; +import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions; +import 
org.apache.hadoop.hive.metastore.api.CommitTxnRequest; +import org.apache.hadoop.hive.metastore.api.CompactionRequest; +import org.apache.hadoop.hive.metastore.api.CompactionType; +import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse; +import org.apache.hadoop.hive.metastore.api.LockComponent; +import org.apache.hadoop.hive.metastore.api.LockLevel; +import org.apache.hadoop.hive.metastore.api.LockRequest; +import org.apache.hadoop.hive.metastore.api.LockResponse; +import org.apache.hadoop.hive.metastore.api.LockState; +import org.apache.hadoop.hive.metastore.api.LockType; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.OpenTxnRequest; +import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse; +import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; +import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; +import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement; +import org.apache.hadoop.hive.metastore.api.UnlockRequest; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; /** * Tests for TxnHandler. @@ -43,11 +61,9 @@ private HiveConf conf = new HiveConf(); private CompactionTxnHandler txnHandler; - static final private Log LOG = LogFactory.getLog(TestCompactionTxnHandler.class); public TestCompactionTxnHandler() throws Exception { TxnDbUtil.setConfValues(conf); - LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG); tearDown(); } diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java index f478184bb82da4a3c3cf015dad1a6dd750399933..6dc0bd37dd4b79645994d76a084277f0d6878b45 100644 --- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java +++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java @@ -17,16 +17,11 @@ */ package org.apache.hadoop.hive.metastore.txn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.api.*; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; +import static junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertFalse; +import static junit.framework.Assert.assertNull; +import static junit.framework.Assert.assertTrue; +import static junit.framework.Assert.fail; import java.sql.Connection; import java.sql.SQLException; @@ -36,21 +31,66 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import static junit.framework.Assert.*; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.AbortTxnRequest; +import org.apache.hadoop.hive.metastore.api.CheckLockRequest; +import org.apache.hadoop.hive.metastore.api.CommitTxnRequest; +import org.apache.hadoop.hive.metastore.api.CompactionRequest; +import org.apache.hadoop.hive.metastore.api.CompactionType; +import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse; +import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse; +import org.apache.hadoop.hive.metastore.api.HeartbeatRequest; +import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeRequest; +import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse; +import 
org.apache.hadoop.hive.metastore.api.LockComponent; +import org.apache.hadoop.hive.metastore.api.LockLevel; +import org.apache.hadoop.hive.metastore.api.LockRequest; +import org.apache.hadoop.hive.metastore.api.LockResponse; +import org.apache.hadoop.hive.metastore.api.LockState; +import org.apache.hadoop.hive.metastore.api.LockType; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.NoSuchLockException; +import org.apache.hadoop.hive.metastore.api.NoSuchTxnException; +import org.apache.hadoop.hive.metastore.api.OpenTxnRequest; +import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse; +import org.apache.hadoop.hive.metastore.api.ShowCompactRequest; +import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; +import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement; +import org.apache.hadoop.hive.metastore.api.ShowLocksRequest; +import org.apache.hadoop.hive.metastore.api.ShowLocksResponse; +import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement; +import org.apache.hadoop.hive.metastore.api.TxnAbortedException; +import org.apache.hadoop.hive.metastore.api.TxnInfo; +import org.apache.hadoop.hive.metastore.api.TxnOpenException; +import org.apache.hadoop.hive.metastore.api.TxnState; +import org.apache.hadoop.hive.metastore.api.UnlockRequest; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.junit.After; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests for TxnHandler. */ public class TestTxnHandler { static final private String CLASS_NAME = TxnHandler.class.getName(); - static final private Log LOG = LogFactory.getLog(CLASS_NAME); + private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME); private HiveConf conf = new HiveConf(); private TxnHandler txnHandler; public TestTxnHandler() throws Exception { TxnDbUtil.setConfValues(conf); - LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG); + LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + Configuration conf = ctx.getConfiguration(); + conf.getLoggerConfig(CLASS_NAME).setLevel(Level.DEBUG); + ctx.updateLoggers(conf); tearDown(); } diff --git a/packaging/src/main/assembly/bin.xml b/packaging/src/main/assembly/bin.xml index a1c176fe1eef3355b41b40963025c40da7cc27c9..9fe9282499c9b6d438942966156fc02a65981cc0 100644 --- a/packaging/src/main/assembly/bin.xml +++ b/packaging/src/main/assembly/bin.xml @@ -274,7 +274,7 @@ 644 webhcat-default.xml - webhcat-log4j.properties + webhcat-log4j2.xml true hcatalog/etc/webhcat @@ -322,19 +322,19 @@ - ${project.parent.basedir}/common/src/main/resources/hive-log4j.properties + ${project.parent.basedir}/common/src/main/resources/hive-log4j2.xml conf - hive-log4j.properties.template + hive-log4j2.xml.template - ${project.parent.basedir}/ql/src/main/resources/hive-exec-log4j.properties + ${project.parent.basedir}/ql/src/main/resources/hive-exec-log4j2.xml conf - hive-exec-log4j.properties.template + hive-exec-log4j2.xml.template - ${project.parent.basedir}/beeline/src/main/resources/beeline-log4j.properties + ${project.parent.basedir}/beeline/src/main/resources/beeline-log4j2.xml conf - beeline-log4j.properties.template + beeline-log4j2.xml.template ${project.parent.basedir}/hcatalog/README.txt diff --git a/pom.xml b/pom.xml 
index 1abf7382213096bdc157aa952c56a18e4cd8e201..6e6295fdf0ba59950f6bb6a6ec50df23b75b9a9f 100644 --- a/pom.xml +++ b/pom.xml @@ -147,8 +147,7 @@ 2.22 0.9.2 0.9.2 - 1.2.16 - 1.2.17 + 2.3 2.3 1.9.5 2.0.0-M5 @@ -355,14 +354,24 @@ ${junit.version} - log4j - log4j - ${log4j.version} + org.apache.logging.log4j + log4j-1.2-api + ${log4j2.version} - log4j - apache-log4j-extras - ${log4j-extras.version} + org.apache.logging.log4j + log4j-web + ${log4j2.version} + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + + + org.apache.logging.log4j + log4j-jcl + ${log4j2.version} org.antlr @@ -573,11 +582,6 @@ ${slf4j.version} - org.slf4j - slf4j-log4j12 - ${slf4j.version} - - xerces xercesImpl ${xerces.version} @@ -593,11 +597,6 @@ slf4j-api ${slf4j.version} - - org.slf4j - slf4j-log4j12 - ${slf4j.version} - @@ -861,7 +860,7 @@ ${maven.repo.local} local - ${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j.properties + ${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j2.xml true ${test.tmp.dir} diff --git a/ql/pom.xml b/ql/pom.xml index 6026c4988f08da83908e316400c0ee9026855b25..a63c0e71aa16d4896e5cc771a2a544d3df766b3d 100644 --- a/ql/pom.xml +++ b/ql/pom.xml @@ -112,14 +112,19 @@ ${javolution.version} - log4j - log4j - ${log4j.version} + org.apache.logging.log4j + log4j-1.2-api + ${log4j2.version} - log4j - apache-log4j-extras - ${log4j-extras.version} + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + + + org.apache.logging.log4j + log4j-jcl + ${log4j2.version} org.antlr diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index a2cf71281e8cd960e5f297638005da010b74d6ae..82345ee760550b1a8daaf7fc0fe22475a6f2c2b9 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -27,7 +27,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.Enumeration; import java.util.List; import java.util.Properties; @@ -57,13 +56,14 @@ import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.exec.tez.TezSessionState; import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager; +import org.apache.hadoop.hive.ql.exec.tez.TezSessionState; import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveKey; import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl; import org.apache.hadoop.hive.ql.io.IOPrepareCache; +import org.apache.hadoop.hive.ql.log.NullAppender; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.FetchWork; import org.apache.hadoop.hive.ql.plan.MapWork; @@ -88,11 +88,12 @@ import org.apache.hadoop.mapred.Partitioner; import org.apache.hadoop.mapred.RunningJob; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.log4j.Appender; -import org.apache.log4j.BasicConfigurator; -import org.apache.log4j.FileAppender; -import org.apache.log4j.LogManager; -import org.apache.log4j.varia.NullAppender; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.appender.FileAppender; +import 
org.apache.logging.log4j.core.appender.RollingFileAppender; /** * ExecDriver is the central class in co-ordinating execution of any map-reduce task. @@ -687,8 +688,10 @@ public static void main(String[] args) throws IOException, HiveException { if (noLog) { // If started from main(), and noLog is on, we should not output // any logs. To turn the log on, please set -Dtest.silent=false - BasicConfigurator.resetConfiguration(); - BasicConfigurator.configure(new NullAppender()); + Logger logger = org.apache.logging.log4j.LogManager.getRootLogger(); + NullAppender appender = NullAppender.createNullAppender(); + appender.addToLogger(logger.getName(), Level.ERROR); + appender.start(); } else { setupChildLog4j(conf); } @@ -703,10 +706,12 @@ public static void main(String[] args) throws IOException, HiveException { // print out the location of the log file for the user so // that it's easy to find reason for local mode execution failures - for (Appender appender : Collections.list((Enumeration) LogManager.getRootLogger() - .getAllAppenders())) { + for (Appender appender : ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger()) + .getAppenders().values()) { if (appender instanceof FileAppender) { - console.printInfo("Execution log at: " + ((FileAppender) appender).getFile()); + console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName()); + } else if (appender instanceof RollingFileAppender) { + console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName()); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java new file mode 100644 index 0000000000000000000000000000000000000000..005f0f11dc238a8143da751d056b9ade54f542d1 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java @@ -0,0 +1,134 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.log; + +import java.io.Serializable; + +import org.apache.hadoop.hive.common.classification.InterfaceAudience; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginElement; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.layout.PatternLayout; + +import com.google.common.annotations.VisibleForTesting; + +/** + * A Log4j2 Appender that simply counts logging events in four levels: + * fatal, error, warn and info. The class name is used in log4j2.xml. + */ +@Plugin(name = "HiveEventCounter", category = "Core", elementType = "appender", printObject = true) +public class HiveEventCounter extends AbstractAppender { + private static LoggerContext context = (LoggerContext) LogManager.getContext(false); + private static Configuration configuration = context.getConfiguration(); + private static final String APPENDER_NAME = "HiveEventCounter"; + private static final int FATAL = 0; + private static final int ERROR = 1; + private static final int WARN = 2; + private static final int INFO = 3; + + private static class EventCounts { + private final long[] counts = {0, 0, 0, 0}; + + private synchronized void incr(int i) { + ++counts[i]; + } + + private synchronized long get(int i) { + return counts[i]; + } + } + + private static EventCounts counts = new EventCounts(); + + protected HiveEventCounter(String name, Filter filter, + Layout layout, boolean ignoreExceptions) { + super(name, filter, layout, ignoreExceptions); + } + + @PluginFactory + public static HiveEventCounter createInstance(@PluginAttribute("name") String name, + @PluginAttribute("ignoreExceptions") boolean ignoreExceptions, + @PluginElement("Layout") Layout layout, + @PluginElement("Filters") Filter filter) { + if (name == null) { + name = APPENDER_NAME; + } + + if (layout == null) { + layout = PatternLayout.createDefaultLayout(); + } + return new HiveEventCounter(name, filter, layout, ignoreExceptions); + } + + @InterfaceAudience.Private + public static long getFatal() { + return counts.get(FATAL); + } + + @InterfaceAudience.Private + public static long getError() { + return counts.get(ERROR); + } + + @InterfaceAudience.Private + public static long getWarn() { + return counts.get(WARN); + } + + @InterfaceAudience.Private + public static long getInfo() { + return counts.get(INFO); + } + + @VisibleForTesting + public void addToLogger(String loggerName, Level level) { + LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName); + loggerConfig.addAppender(this, level, null); + context.updateLoggers(); + } + + @VisibleForTesting + 
public void removeFromLogger(String loggerName) { + LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName); + loggerConfig.removeAppender(APPENDER_NAME); + context.updateLoggers(); + } + + public void append(LogEvent event) { + Level level = event.getLevel(); + if (level.equals(Level.INFO)) { + counts.incr(INFO); + } else if (level.equals(Level.WARN)) { + counts.incr(WARN); + } else if (level.equals(Level.ERROR)) { + counts.incr(ERROR); + } else if (level.equals(Level.FATAL)) { + counts.incr(FATAL); + } + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java new file mode 100644 index 0000000000000000000000000000000000000000..c4cb7ddc6e891aa9096eb38801ebee244be807f2 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.log; + +import java.io.Serializable; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.layout.PatternLayout; + +/** + * A NullAppender merely exists, it never outputs a message to any device. + */ +@Plugin(name = "NullAppender", category = "Core", elementType = "appender", printObject = false) +public class NullAppender extends AbstractAppender { + + private static LoggerContext context = (LoggerContext) LogManager.getContext(false); + private static Configuration configuration = context.getConfiguration(); + + protected NullAppender(String name, Filter filter, + Layout layout, boolean ignoreExceptions) { + super(name, filter, layout, ignoreExceptions); + } + + @PluginFactory + public static NullAppender createNullAppender() { + return new NullAppender("NullAppender", null, PatternLayout.createDefaultLayout(), true); + } + + public void addToLogger(String loggerName, Level level) { + LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName); + loggerConfig.addAppender(this, level, null); + context.updateLoggers(); + } + + public void append(LogEvent event) { + // no-op + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java deleted file mode 100644 index 6a59d4a908b1e06fc43595d3f020b095e4f8e4ee..0000000000000000000000000000000000000000 --- a/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
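(Usage sketch, not part of the patch: ExecDriver's noLog branch above replaces log4j1's BasicConfigurator.configure(new NullAppender()) with this plugin class. A minimal standalone equivalent, assuming only the log4j2 2.3 API and the NullAppender defined above; the demo class name is hypothetical:

    import org.apache.hadoop.hive.ql.log.NullAppender;
    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;

    public class SilenceLogsExample {
      public static void main(String[] args) {
        // Attach a no-op sink to the root logger's configuration.
        NullAppender appender = NullAppender.createNullAppender();
        appender.addToLogger(LogManager.getRootLogger().getName(), Level.ERROR);
        appender.start(); // log4j2 appenders are lifecycle-managed; start() before use
        LogManager.getRootLogger().error("events routed to this appender are discarded");
      }
    }

Note that, unlike the removed BasicConfigurator.resetConfiguration() call, this only adds a no-op appender; any appenders already attached to the root logger remain active.)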
- */ - -package org.apache.hadoop.hive.ql.log; - -import java.lang.management.ManagementFactory; -import java.lang.management.RuntimeMXBean; - -import org.apache.log4j.DailyRollingFileAppender; - -public class PidDailyRollingFileAppender extends DailyRollingFileAppender { - - @Override - public void setFile(String file) { - RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean(); - super.setFile(file + '.' + rt.getName()); - } -} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java new file mode 100644 index 0000000000000000000000000000000000000000..4db10bb0739021d03ebb255ba079cf8a210e10fa --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java @@ -0,0 +1,62 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.log; + +import java.lang.management.ManagementFactory; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.pattern.AbstractPatternConverter; +import org.apache.logging.log4j.core.pattern.ArrayPatternConverter; +import org.apache.logging.log4j.core.pattern.ConverterKeys; + +/** + * FilePattern converter that converts the %pid pattern into processID@hostname + * information obtained at runtime. + * + * Example usage: a RollingFile filePattern such as "test.log.%pid.gz" + * + * Will generate an output file whose name contains processID@hostname, like + * test.log.95232@localhost.gz + */ +@Plugin(name = "PidFilePatternConverter", category = "FileConverter") +@ConverterKeys({ "pid" }) +public class PidFilePatternConverter extends AbstractPatternConverter implements + ArrayPatternConverter { + + /** + * Private constructor. + */ + private PidFilePatternConverter() { + super("pid", "pid"); + } + + @PluginFactory + public static PidFilePatternConverter newInstance() { + return new PidFilePatternConverter(); + } + + public void format(StringBuilder toAppendTo, Object... objects) { + toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName()); + } + + public void format(Object obj, StringBuilder toAppendTo) { + toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName()); + } +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/StringAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/StringAppender.java new file mode 100644 index 0000000000000000000000000000000000000000..270a2324be3519b5dff87da92dfb1b298db55bec --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/log/StringAppender.java @@ -0,0 +1,128 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.log; + +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Serializable; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender; +import org.apache.logging.log4j.core.appender.OutputStreamManager; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.layout.PatternLayout; + +import com.google.common.annotations.VisibleForTesting; + +/** + * Log4j2 appender that writes to an in-memory string object. + */ +@Plugin(name = "StringAppender", category = "Core", elementType = "appender", printObject = true) +public class StringAppender + extends AbstractOutputStreamAppender { + + private static final String APPENDER_NAME = "StringAppender"; + private static LoggerContext context = (LoggerContext) LogManager.getContext(false); + private static Configuration configuration = context.getConfiguration(); + private StringOutputStreamManager manager; + + /** + * Instantiate a StringAppender and set the output destination to a + * new {@link OutputStreamWriter} initialized with os + * as its {@link OutputStream}. + * + * @param name The name of the Appender. + * @param layout The layout to format the message. + * @param filter The Filter to apply, or null for none. + * @param ignoreExceptions Whether errors raised while appending are ignored. + * @param immediateFlush Whether to flush after every append. + * @param manager The OutputStreamManager. 
+ */ + protected StringAppender(String name, + Layout layout, Filter filter, + boolean ignoreExceptions, boolean immediateFlush, + StringOutputStreamManager manager) { + super(name, layout, filter, ignoreExceptions, immediateFlush, manager); + this.manager = manager; + } + + @PluginFactory + public static StringAppender createStringAppender( + @PluginAttribute("name") String nullablePatternString) { + PatternLayout layout; + if (nullablePatternString == null) { + layout = PatternLayout.createDefaultLayout(); + } else { + layout = PatternLayout.createLayout(nullablePatternString, configuration, + null, null, true, false, null, null); + } + + return new StringAppender(APPENDER_NAME, layout, null, false, true, + new StringOutputStreamManager(new ByteArrayOutputStream(), "StringStream", layout)); + } + + @VisibleForTesting + public void addToLogger(String loggerName, Level level) { + LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName); + loggerConfig.addAppender(this, level, null); + context.updateLoggers(); + } + + @VisibleForTesting + public void removeFromLogger(String loggerName) { + LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName); + loggerConfig.removeAppender(APPENDER_NAME); + context.updateLoggers(); + } + + public String getOutput() { + manager.flush(); + return new String(manager.getStream().toByteArray()); + } + + public void reset() { + manager.reset(); + } + + protected static class StringOutputStreamManager extends OutputStreamManager { + ByteArrayOutputStream stream; + + protected StringOutputStreamManager(ByteArrayOutputStream os, String streamName, + Layout layout) { + super(os, streamName, layout); + stream = os; + } + + public ByteArrayOutputStream getStream() { + return stream; + } + + public void reset() { + stream.reset(); + } + } +} diff --git a/ql/src/main/resources/hive-exec-log4j.properties b/ql/src/main/resources/hive-exec-log4j.properties deleted file mode 100644 index 9eaa6b64fe827bdebe0d45bc7d13a07a30cfd4a1..0000000000000000000000000000000000000000 --- a/ql/src/main/resources/hive-exec-log4j.properties +++ /dev/null @@ -1,77 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.log.threshold=ALL -hive.root.logger=INFO,FA -hive.log.dir=${java.io.tmpdir}/${user.name} -hive.query.id=hadoop -hive.log.file=${hive.query.id}.log - -# Define the root logger to the system property "hadoop.root.logger". 
-log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=${hive.log.threshold} - -# -# File Appender -# - -log4j.appender.FA=org.apache.log4j.FileAppender -log4j.appender.FA.File=${hive.log.dir}/${hive.log.file} -log4j.appender.FA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t] %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,FA -log4j.category.Datastore=ERROR,FA -log4j.category.Datastore.Schema=ERROR,FA -log4j.category.JPOX.Datastore=ERROR,FA -log4j.category.JPOX.Plugin=ERROR,FA -log4j.category.JPOX.MetaData=ERROR,FA -log4j.category.JPOX.Query=ERROR,FA -log4j.category.JPOX.General=ERROR,FA -log4j.category.JPOX.Enhancer=ERROR,FA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA diff --git a/ql/src/main/resources/hive-exec-log4j2.xml b/ql/src/main/resources/hive-exec-log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..7a4767c3351499c0401ef0097225094087b2c856 --- /dev/null +++ b/ql/src/main/resources/hive-exec-log4j2.xml @@ -0,0 +1,110 @@ [hive-exec-log4j2.xml: XML markup not preserved in this copy of the patch; the surviving property values are ALL, INFO, FA, ${sys:java.io.tmpdir}/${sys:user.name}, hadoop, and ${hive.query.id}.log] diff --git a/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java new file mode 100644 index 0000000000000000000000000000000000000000..549744f168b00e5d47730f23dc471b524612c806 --- /dev/null +++ b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java @@ -0,0 +1,94 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.ql.log; + +import static org.junit.Assert.assertEquals; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.junit.Before; +import org.junit.Test; + +/** + * + */ +public class TestLog4j2Appenders { + + @Before + public void setup() { + // programmatically set root logger level to INFO. By default if log4j2-test.xml is not + // available root logger will use ERROR log level + LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + Configuration config = ctx.getConfiguration(); + LoggerConfig loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); + loggerConfig.setLevel(Level.INFO); + ctx.updateLoggers(); + } + + @Test + public void testStringAppender() throws Exception { + // Get the RootLogger which, if you don't have log4j2-test.xml defined, will only log ERRORs + Logger logger = LogManager.getRootLogger(); + // Create a String Appender to capture log output + StringAppender appender = StringAppender.createStringAppender("%m"); + appender.addToLogger(logger.getName(), Level.INFO); + appender.start(); + + // Log to the string appender + logger.info("Hello!"); + logger.info(" World"); + + assertEquals("Hello! World", appender.getOutput()); + appender.removeFromLogger(LogManager.getRootLogger().getName()); + } + + @Test + public void testHiveEventCounterAppender() throws Exception { + Logger logger = LogManager.getRootLogger(); + HiveEventCounter appender = HiveEventCounter.createInstance("EventCounter", true, null, null); + appender.addToLogger(logger.getName(), Level.INFO); + appender.start(); + + logger.info("Test"); + logger.info("Test"); + logger.info("Test"); + logger.info("Test"); + + logger.error("Test"); + logger.error("Test"); + logger.error("Test"); + + logger.warn("Test"); + logger.warn("Test"); + + logger.fatal("Test"); + + // HiveEventCounter will be loaded from hive-log4j2-test.xml before tests are run. 
The 2 log + // info msgs from previous test case will also be counted along with 4 log info msgs in this + // test and hence we assert for 6 here + assertEquals(6, appender.getInfo()); + assertEquals(3, appender.getError()); + assertEquals(2, appender.getWarn()); + assertEquals(1, appender.getFatal()); + appender.removeFromLogger(LogManager.getRootLogger().getName()); + } +} diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java index 99fbd5dd8086851363baf7e35cd43d8e341a3a4d..8c51946ef3085244d90fc70ce82208b825007466 100755 --- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java @@ -20,7 +20,6 @@ import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME; -import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -29,8 +28,6 @@ import java.util.Map; import java.util.regex.Pattern; -import junit.framework.TestCase; - import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -46,6 +43,7 @@ import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.index.HiveIndex; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; +import org.apache.hadoop.hive.ql.log.StringAppender; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; @@ -56,15 +54,19 @@ import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.util.StringUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; -import org.apache.log4j.PatternLayout; -import org.apache.log4j.WriterAppender; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; import org.apache.thrift.protocol.TBinaryProtocol; import org.junit.Assert; import com.google.common.collect.ImmutableMap; +import junit.framework.TestCase; + /** * TestHive. 
* @@ -248,36 +250,39 @@ public void testThriftTable() throws Throwable { * @throws Throwable */ public void testMetaStoreApiTiming() throws Throwable { - // set log level to DEBUG, as this is logged at debug level - Logger logger = Logger.getLogger("hive.ql.metadata.Hive"); - Level origLevel = logger.getLevel(); - logger.setLevel(Level.DEBUG); - - // create an appender to capture the logs in a string - StringWriter writer = new StringWriter(); - WriterAppender appender = new WriterAppender(new PatternLayout(), writer); + // Programmatically lower the hive.ql.metadata.Hive logger to DEBUG, since the + // metastore API timing summary is only logged at debug level + Logger logger = LogManager.getLogger("hive.ql.metadata.Hive"); + Level oldLevel = logger.getLevel(); + LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + Configuration config = ctx.getConfiguration(); + LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName()); + loggerConfig.setLevel(Level.DEBUG); + ctx.updateLoggers(); + + // Create a String Appender to capture log output + StringAppender appender = StringAppender.createStringAppender("%m"); + appender.addToLogger(logger.getName(), Level.DEBUG); + appender.start(); try { - logger.addAppender(appender); - hm.clearMetaCallTiming(); hm.getAllDatabases(); hm.dumpAndClearMetaCallTiming("test"); - String logStr = writer.toString(); + String logStr = appender.getOutput(); String expectedString = "getAllDatabases_()="; Assert.assertTrue(logStr + " should contain <" + expectedString, logStr.contains(expectedString)); // reset the log buffer, verify new dump without any api call does not contain func - writer.getBuffer().setLength(0); + appender.reset(); hm.dumpAndClearMetaCallTiming("test"); - logStr = writer.toString(); + logStr = appender.getOutput(); Assert.assertFalse(logStr + " should not contain <" + expectedString, logStr.contains(expectedString)); - } finally { - logger.setLevel(origLevel); - logger.removeAppender(appender); + loggerConfig.setLevel(oldLevel); + ctx.updateLoggers(); + appender.removeFromLogger(logger.getName()); } } diff --git a/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java b/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java index 9d64b102e008d4116e5e59cb6442de78984da649..876ade83c0d2122156a395fc95dfd71de6ae5bed 100644 --- a/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java +++ b/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java @@ -18,9 +18,6 @@ package org.apache.hive.service.cli; -import org.apache.log4j.Layout; -import org.apache.log4j.PatternLayout; - /** * CLIServiceUtils. * @@ -29,10 +26,6 @@ private static final char SEARCH_STRING_ESCAPE = '\\'; - public static final Layout verboseLayout = new PatternLayout( - "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"); - public static final Layout nonVerboseLayout = new PatternLayout( - "%-5p : %m%n"); /** * Convert a SQL search pattern into an equivalent Java Regex. diff --git a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java index 70340bd13cbc1798642a790336c670571bf54ca7..036f110b90b58ad35f70f85f82e4c8e8c6a2e782 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java +++ b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java @@ -6,44 +6,84 @@ * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *
+ * http://www.apache.org/licenses/LICENSE-2.0 + *
* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hive.service.cli.operation; -import java.io.CharArrayWriter; -import java.util.Enumeration; + +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Serializable; import java.util.regex.Pattern; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.session.OperationLog; -import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel; -import org.apache.hive.service.cli.CLIServiceUtils; -import org.apache.log4j.Appender; -import org.apache.log4j.ConsoleAppender; -import org.apache.log4j.Layout; -import org.apache.log4j.Logger; -import org.apache.log4j.WriterAppender; -import org.apache.log4j.spi.Filter; -import org.apache.log4j.spi.LoggingEvent; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender; +import org.apache.logging.log4j.core.appender.ConsoleAppender; +import org.apache.logging.log4j.core.appender.OutputStreamManager; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.filter.AbstractFilter; +import org.apache.logging.log4j.core.layout.PatternLayout; import com.google.common.base.Joiner; /** - * An Appender to divert logs from individual threads to the LogObject they belong to. + * Divert appender to redirect operation logs to separate files. */ -public class LogDivertAppender extends WriterAppender { - private static final Logger LOG = Logger.getLogger(LogDivertAppender.class.getName()); +public class LogDivertAppender + extends AbstractOutputStreamAppender { + private static final Logger LOG = LogManager.getLogger(LogDivertAppender.class.getName()); + private static LoggerContext context = (LoggerContext) LogManager.getContext(false); + private static Configuration configuration = context.getConfiguration(); + public static final Layout verboseLayout = PatternLayout.createLayout( + "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n", configuration, null, null, true, false, null, null); + public static final Layout nonVerboseLayout = PatternLayout.createLayout( + "%-5p : %m%n", configuration, null, null, true, false, null, null); + private final OperationManager operationManager; + private StringOutputStreamManager manager; private boolean isVerbose; - private Layout verboseLayout; + private final Layout layout; + + /** + * Instantiate a WriterAppender and set the output destination to a + * new {@link OutputStreamWriter} initialized with os + * as its {@link OutputStream}. + * + * @param name The name of the Appender. + * @param layout The layout to format the message. + * @param filter Filter + * @param ignoreExceptions To ignore exceptions or not + * @param immediateFlush To flush immediately or not + * @param manager The OutputStreamManager. 
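+ * @param loggingMode Initial operation-log verbosity (determines the starting layout)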
+ * @param operationManager Operation manager + */ + protected LogDivertAppender(String name, + Layout layout, Filter filter, boolean ignoreExceptions, + boolean immediateFlush, + StringOutputStreamManager manager, OperationManager operationManager, + OperationLog.LoggingLevel loggingMode) { + super(name, layout, filter, ignoreExceptions, immediateFlush, manager); + this.layout = layout; + this.operationManager = operationManager; + this.manager = manager; + this.isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE); + } /** * A log filter that filters messages coming from the logger with the given names. @@ -52,31 +92,31 @@ * they don't generate more logs for themselves when they process logs. * White list filter is used for less verbose log collection */ - private static class NameFilter extends Filter { + private static class NameFilter extends AbstractFilter { private Pattern namePattern; - private LoggingLevel loggingMode; + private OperationLog.LoggingLevel loggingMode; private OperationManager operationManager; /* Patterns that are excluded in verbose logging level. * Filter out messages coming from log processing classes, or we'll run an infinite loop. */ private static final Pattern verboseExcludeNamePattern = Pattern.compile(Joiner.on("|"). - join(new String[] {LOG.getName(), OperationLog.class.getName(), - OperationManager.class.getName()})); + join(new String[]{LOG.getName(), OperationLog.class.getName(), + OperationManager.class.getName()})); /* Patterns that are included in execution logging level. * In execution mode, show only select logger messages. */ private static final Pattern executionIncludeNamePattern = Pattern.compile(Joiner.on("|"). - join(new String[] {"org.apache.hadoop.mapreduce.JobSubmitter", - "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(), - "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"})); + join(new String[]{"org.apache.hadoop.mapreduce.JobSubmitter", + "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(), + "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"})); /* Patterns that are included in performance logging level. * In performance mode, show execution and performance logger messages. */ private static final Pattern performanceIncludeNamePattern = Pattern.compile( - executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName()); + executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName()); private void setCurrentNamePattern(OperationLog.LoggingLevel mode) { if (mode == OperationLog.LoggingLevel.VERBOSE) { @@ -88,26 +128,25 @@ private void setCurrentNamePattern(OperationLog.LoggingLevel mode) { } } - public NameFilter( - OperationLog.LoggingLevel loggingMode, OperationManager op) { + public NameFilter(OperationLog.LoggingLevel loggingMode, OperationManager op) { this.operationManager = op; this.loggingMode = loggingMode; setCurrentNamePattern(loggingMode); } @Override - public int decide(LoggingEvent ev) { + public Result filter(LogEvent event) { OperationLog log = operationManager.getOperationLogByThread(); boolean excludeMatches = (loggingMode == OperationLog.LoggingLevel.VERBOSE); if (log == null) { - return Filter.DENY; + return Result.DENY; } OperationLog.LoggingLevel currentLoggingMode = log.getOpLoggingLevel(); // If logging is disabled, deny everything. 
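+ // log4j2 filters return Result.ACCEPT, Result.DENY or Result.NEUTRAL rather than + // log4j1's integer decide() codes; NEUTRAL defers the decision to any remaining + // filters and the logger's own level.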
if (currentLoggingMode == OperationLog.LoggingLevel.NONE) { - return Filter.DENY; + return Result.DENY; } // Look at the current session's setting // and set the pattern and excludeMatches accordingly. @@ -116,88 +155,85 @@ public int decide(LoggingEvent ev) { setCurrentNamePattern(loggingMode); } - boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches(); + boolean isMatch = namePattern.matcher(event.getLoggerName()).matches(); if (excludeMatches == isMatch) { // Deny if this is black-list filter (excludeMatches = true) and it - // matched - // or if this is whitelist filter and it didn't match - return Filter.DENY; + // matched or if this is whitelist filter and it didn't match + return Result.DENY; } - return Filter.NEUTRAL; + return Result.NEUTRAL; } } - /** This is where the log message will go to */ - private final CharArrayWriter writer = new CharArrayWriter(); + private static Layout initLayout(boolean isVerbose) { + // There should be a ConsoleAppender. Copy its Layout. + Logger root = LogManager.getRootLogger(); + Layout layout = null; + + for (Appender ap : ((org.apache.logging.log4j.core.Logger) root).getAppenders().values()) { + if (ap.getClass().equals(ConsoleAppender.class)) { + layout = ap.getLayout(); + break; + } + } - private void setLayout (boolean isVerbose, Layout lo) { if (isVerbose) { - if (lo == null) { - lo = CLIServiceUtils.verboseLayout; + if (layout == null) { + layout = verboseLayout; LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern."); } } else { - lo = CLIServiceUtils.nonVerboseLayout; + layout = nonVerboseLayout; } - setLayout(lo); + + return layout; } - private void initLayout(boolean isVerbose) { - // There should be a ConsoleAppender. Copy its Layout. - Logger root = Logger.getRootLogger(); - Layout layout = null; + public static LogDivertAppender createInstance(OperationManager operationManager, + OperationLog.LoggingLevel loggingMode) { + boolean isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE); + Layout layout = initLayout(isVerbose); + Filter filter = new NameFilter(loggingMode, operationManager); + return new LogDivertAppender("LogDivertAppender", layout, filter, false, true, + new StringOutputStreamManager(new ByteArrayOutputStream(), "StringStream", layout), + operationManager, loggingMode); + } - Enumeration appenders = root.getAllAppenders(); - while (appenders.hasMoreElements()) { - Appender ap = (Appender) appenders.nextElement(); - if (ap.getClass().equals(ConsoleAppender.class)) { - layout = ap.getLayout(); - break; - } - } - setLayout(isVerbose, layout); + public String getOutput() { + return new String(manager.getStream().toByteArray()); } - public LogDivertAppender(OperationManager operationManager, - OperationLog.LoggingLevel loggingMode) { - isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE); - initLayout(isVerbose); - setWriter(writer); - setName("LogDivertAppender"); - this.operationManager = operationManager; - this.verboseLayout = isVerbose ? layout : CLIServiceUtils.verboseLayout; - addFilter(new NameFilter(loggingMode, operationManager)); + @Override + public void start() { + super.start(); } @Override - public void doAppend(LoggingEvent event) { - OperationLog log = operationManager.getOperationLogByThread(); + public Layout getLayout() { - // Set current layout depending on the verbose/non-verbose mode. 
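+ // In log4j2 2.3, AbstractOutputStreamAppender consults getLayout() on each append, + // so overriding it here lets the appender switch formats per event.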
+ // If there is a logging level change from verbose->non-verbose or vice-versa since + // the last subAppend call, change the layout to preserve consistency. + OperationLog log = operationManager.getOperationLogByThread(); if (log != null) { - boolean isCurrModeVerbose = (log.getOpLoggingLevel() == OperationLog.LoggingLevel.VERBOSE); + isVerbose = (log.getOpLoggingLevel() == OperationLog.LoggingLevel.VERBOSE); + } - // If there is a logging level change from verbose->non-verbose or vice-versa since - // the last subAppend call, change the layout to preserve consistency. - if (isCurrModeVerbose != isVerbose) { - isVerbose = isCurrModeVerbose; - setLayout(isVerbose, verboseLayout); - } + // layout is immutable in log4j2, so we cheat here and return a different layout when + // verbosity changes + if (isVerbose) { + return verboseLayout; + } else { + return nonVerboseLayout; } - super.doAppend(event); } - /** - * Overrides WriterAppender.subAppend(), which does the real logging. No need - * to worry about concurrency since log4j calls this synchronously. - */ @Override - protected void subAppend(LoggingEvent event) { - super.subAppend(event); - // That should've gone into our writer. Notify the LogContext. - String logOutput = writer.toString(); - writer.reset(); + public void append(LogEvent event) { + super.append(event); + + String logOutput = getOutput(); + manager.reset(); OperationLog log = operationManager.getOperationLogByThread(); if (log == null) { @@ -206,4 +242,27 @@ protected void subAppend(LoggingEvent event) { } log.writeOperationLog(logOutput); } + + protected static class StringOutputStreamManager extends OutputStreamManager { + ByteArrayOutputStream stream; + + protected StringOutputStreamManager(ByteArrayOutputStream os, String streamName, + Layout layout) { + super(os, streamName, layout); + stream = os; + } + + public ByteArrayOutputStream getStream() { + return stream; + } + + public void reset() { + stream.reset(); + } + + @Override + protected void writeFooter() { + super.writeFooter(); + } + } } diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java index 9b0a51958adb54f2521f9752edfe5a9d9735ce41..27895c1222673c7b43f1a484809e04117a1826d8 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java +++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java @@ -41,8 +41,11 @@ import org.apache.hive.service.cli.RowSetFactory; import org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.cli.session.HiveSession; -import org.apache.log4j.Appender; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; /** * OperationManager. @@ -50,7 +53,8 @@ */ public class OperationManager extends AbstractService { private final Log LOG = LogFactory.getLog(OperationManager.class.getName()); - + private LoggerContext context = (LoggerContext) LogManager.getContext(false); + private Configuration configuration = context.getConfiguration(); private final Map handleToOperation = new HashMap(); @@ -83,8 +87,11 @@ public synchronized void stop() { private void initOperationLogCapture(String loggingMode) { // Register another Appender (with the same layout) that talks to us. 
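+ // In log4j2, appenders hang off a LoggerConfig rather than the Logger itself: + // add the appender to the root logger's config, publish the change with + // updateLoggers(), then start() the appender so it can accept events.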
- Appender ap = new LogDivertAppender(this, OperationLog.getLoggingLevel(loggingMode)); - Logger.getRootLogger().addAppender(ap); + Appender ap = LogDivertAppender.createInstance(this, OperationLog.getLoggingLevel(loggingMode)); + LoggerConfig loggerConfig = configuration.getLoggerConfig(LogManager.getLogger().getName()); + loggerConfig.addAppender(ap, null, null); + context.updateLoggers(); + ap.start(); } public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession, diff --git a/shims/common/pom.xml b/shims/common/pom.xml index 9e9a3b731e17ddb7e8b1f9c5f832e7e350ebfc27..dfdec2bd2ce51e9cdeb2705f29f69f90440df9fa 100644 --- a/shims/common/pom.xml +++ b/shims/common/pom.xml @@ -41,14 +41,19 @@ ${commons-logging.version} - log4j - log4j - ${log4j.version} + org.apache.logging.log4j + log4j-1.2-api + ${log4j2.version} - log4j - apache-log4j-extras - ${log4j-extras.version} + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + + + org.apache.logging.log4j + log4j-jcl + ${log4j2.version} com.google.guava diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java deleted file mode 100644 index 224b1356c5f3c1683a4ffb39ccbef10fd8d0d930..0000000000000000000000000000000000000000 --- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HiveEventCounter.java +++ /dev/null @@ -1,102 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.shims; - -import org.apache.log4j.Appender; -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.Layout; -import org.apache.log4j.spi.ErrorHandler; -import org.apache.log4j.spi.Filter; -import org.apache.log4j.spi.LoggingEvent; -import org.apache.log4j.spi.OptionHandler; - -public class HiveEventCounter implements Appender, OptionHandler { - - AppenderSkeleton hadoopEventCounter; - - public HiveEventCounter() { - hadoopEventCounter = ShimLoader.getEventCounter(); - } - - @Override - public void close() { - hadoopEventCounter.close(); - } - - @Override - public boolean requiresLayout() { - return hadoopEventCounter.requiresLayout(); - } - - @Override - public void addFilter(Filter filter) { - hadoopEventCounter.addFilter(filter); - } - - @Override - public void clearFilters() { - hadoopEventCounter.clearFilters(); - } - - @Override - public void doAppend(LoggingEvent event) { - hadoopEventCounter.doAppend(event); - } - - @Override - public ErrorHandler getErrorHandler() { - return hadoopEventCounter.getErrorHandler(); - } - - @Override - public Filter getFilter() { - return hadoopEventCounter.getFilter(); - } - - @Override - public Layout getLayout() { - return hadoopEventCounter.getLayout(); - } - - @Override - public String getName() { - return hadoopEventCounter.getName(); - } - - @Override - public void setErrorHandler(ErrorHandler handler) { - hadoopEventCounter.setErrorHandler(handler); - } - - @Override - public void setLayout(Layout layout) { - hadoopEventCounter.setLayout(layout); - } - - @Override - public void setName(String name) { - hadoopEventCounter.setName(name); - } - - @Override - public void activateOptions() { - hadoopEventCounter.activateOptions(); - } - -} diff --git a/spark-client/src/test/resources/log4j.properties b/spark-client/src/test/resources/log4j.properties deleted file mode 100644 index 93a60cc340e7c626c2ae434fce7d6077344b3807..0000000000000000000000000000000000000000 --- a/spark-client/src/test/resources/log4j.properties +++ /dev/null @@ -1,23 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# Set everything to be logged to the file target/unit-tests.log -log4j.rootCategory=DEBUG, console -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n diff --git a/spark-client/src/test/resources/log4j2.xml b/spark-client/src/test/resources/log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..cd9cbafad74198b696bff23ee4ce62028a5d8d8c --- /dev/null +++ b/spark-client/src/test/resources/log4j2.xml @@ -0,0 +1,39 @@ [spark-client test log4j2.xml: XML markup not preserved in this copy of the patch; the surviving values are DEBUG and console] diff --git a/testutils/ptest2/src/main/resources/log4j.properties b/testutils/ptest2/src/main/resources/log4j.properties deleted file mode 100644 index edb9696c4ba681b332f4a4775e2618f07553fab9..0000000000000000000000000000000000000000 --- a/testutils/ptest2/src/main/resources/log4j.properties +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -hive.ptest.logdir=target - -log4j.rootLogger=DEBUG,FILE -log4j.threshhold=ALL - -log4j.appender.FILE=org.apache.log4j.RollingFileAppender -log4j.appender.FILE.File=${hive.ptest.logdir}/ptest.log -log4j.appender.FILE.MaxFileSize=50MB -log4j.appender.FILE.MaxBackupIndex=1 -log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n - -log4j.logger.org.apache.http=INFO -log4j.logger.org.springframework=INFO -log4j.logger.org.jclouds=INFO -log4j.logger.jclouds=INFO -log4j.logger.org.apache.hive=DEBUG -log4j.logger.org.apache.http=TRACE - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN diff --git a/testutils/ptest2/src/main/resources/log4j2.xml b/testutils/ptest2/src/main/resources/log4j2.xml new file mode 100644 index 0000000000000000000000000000000000000000..2fe68dda9cef1778afeb647351a649522698f231 --- /dev/null +++ b/testutils/ptest2/src/main/resources/log4j2.xml @@ -0,0 +1,78 @@ [ptest2 log4j2.xml: XML markup not preserved in this copy of the patch; the surviving values are DEBUG, FILE, target, and ptest.log]
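The XML bodies of the new log4j2 configuration files are not preserved in this copy of the patch. As a hand-written approximation only, and not the committed file: assuming log4j2 2.3 syntax, carrying over the values from the deleted ptest2 log4j.properties above, and using a hypothetical property name, the ptest2 log4j2.xml would look roughly like:

    <Configuration status="info">
      <Properties>
        <Property name="hive.ptest.logdir">target</Property>
      </Properties>
      <Appenders>
        <!-- RollingFileAppender replacement: same file, 50 MB cap, one backup -->
        <RollingFile name="FILE" fileName="${hive.ptest.logdir}/ptest.log"
                     filePattern="${hive.ptest.logdir}/ptest.log.%i">
          <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n"/>
          <Policies>
            <SizeBasedTriggeringPolicy size="50 MB"/>
          </Policies>
          <DefaultRolloverStrategy max="1"/>
        </RollingFile>
      </Appenders>
      <Loggers>
        <Root level="DEBUG">
          <AppenderRef ref="FILE"/>
        </Root>
        <!-- per-package levels carried over from the deleted properties file -->
        <Logger name="org.apache.hive" level="DEBUG"/>
        <Logger name="org.springframework" level="INFO"/>
        <Logger name="org.jclouds" level="INFO"/>
        <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN"/>
        <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN"/>
      </Loggers>
    </Configuration>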