diff --git a/beeline/src/main/resources/beeline-log4j.properties b/beeline/src/main/resources/beeline-log4j.properties
deleted file mode 100644
index fe47d94..0000000
--- a/beeline/src/main/resources/beeline-log4j.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=WARN, console
-
-######## console appender ########
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
diff --git a/beeline/src/main/resources/beeline-log4j2.xml b/beeline/src/main/resources/beeline-log4j2.xml
new file mode 100644
index 0000000..dd1f076
--- /dev/null
+++ b/beeline/src/main/resources/beeline-log4j2.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="BeelineLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">WARN</Property>
+    <Property name="hive.root.logger">console</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+    </Root>
+  </Loggers>
+
+</Configuration>
diff --git a/bin/ext/beeline.sh b/bin/ext/beeline.sh
index ab3dc1a..9de8f6c 100644
--- a/bin/ext/beeline.sh
+++ b/bin/ext/beeline.sh
@@ -31,7 +31,7 @@ beeline () {
hadoopClasspath="${HADOOP_CLASSPATH}:"
fi
export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}"
- export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties "
+ export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=beeline-log4j2.xml "
exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@"
}
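Once HADOOP_CLIENT_OPTS carries -Dlog4j.configurationFile, Log4j 2 resolves that name against the classpath the first time a logger is requested. A minimal probe for sanity-checking which configuration actually won the lookup (class name is illustrative, not part of the patch):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;

public class ConfigProbe {
  public static void main(String[] args) {
    // Must be set before the first LogManager call, as the launch scripts do.
    System.setProperty("log4j.configurationFile", "beeline-log4j2.xml");
    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    // Prints the name declared by the configuration that was loaded.
    System.out.println(ctx.getConfiguration().getName());
  }
}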
diff --git a/bin/hive b/bin/hive
index 5dc93fb..9190461 100755
--- a/bin/hive
+++ b/bin/hive
@@ -276,6 +276,8 @@ for i in "$bin"/ext/util/*.sh ; do
. $i
done
+export HADOOP_CLIENT_OPTS=" -Dlog4j.configurationFile=hive-log4j2.xml "
+
if [ "$DEBUG" ]; then
if [ "$HELP" ]; then
debug_help
diff --git a/common/pom.xml b/common/pom.xml
index aedf7ba..532efba 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -61,14 +61,19 @@
       <version>${joda.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>apache-log4j-extras</artifactId>
-      <version>${log4j-extras.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 9118675..5b4297d 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -18,26 +18,22 @@
package org.apache.hadoop.hive.common;
-import java.net.URL;
import java.io.File;
-import java.io.IOException;
-import java.io.FileNotFoundException;
+import java.net.URL;
-import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.PropertyConfigurator;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.logging.log4j.core.config.Configurator;
/**
* Utilities common to logging operations.
*/
public class LogUtils {
- private static final String HIVE_L4J = "hive-log4j.properties";
- private static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties";
+ private static final String HIVE_L4J = "hive-log4j2.xml";
+ private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml";
private static final Log l4j = LogFactory.getLog(LogUtils.class);
@SuppressWarnings("serial")
@@ -95,8 +91,7 @@ private static String initHiveLog4jCommon(ConfVars confVarName)
}
System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
}
- LogManager.resetConfiguration();
- PropertyConfigurator.configure(log4jFileName);
+ Configurator.initialize(null, log4jFileName);
logConfigLocation(conf);
return ("Logging initialized using configuration in " + log4jConfigFile);
}
@@ -123,8 +118,7 @@ private static String initHiveLog4jDefault(
break;
}
if (hive_l4j != null) {
- LogManager.resetConfiguration();
- PropertyConfigurator.configure(hive_l4j);
+ Configurator.initialize(null, hive_l4j.getPath());
logConfigLocation(conf);
return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j);
} else {
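The swap above replaces the Log4j 1.x two-step (LogManager.resetConfiguration() followed by PropertyConfigurator.configure(...)) with a single Log4j 2 call. A self-contained sketch of the new initialization path, with an illustrative config location:

import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configurator;

public class InitSketch {
  public static void main(String[] args) {
    // A null context name lets Log4j 2 pick a default; the second argument
    // is a config file path or URI, e.g. the value of hive.log4j.file.
    LoggerContext ctx = Configurator.initialize(null, "/etc/hive/conf/hive-log4j2.xml");
    System.out.println("Initialized context: " + ctx.getName());
  }
}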
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 39477d6..d638684 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1693,13 +1693,13 @@ public void setSparkConfigUpdated(boolean isSparkConfigUpdated) {
// logging configuration
HIVE_LOG4J_FILE("hive.log4j.file", "",
"Hive log4j configuration file.\n" +
- "If the property is not set, then logging will be initialized using hive-log4j.properties found on the classpath.\n" +
- "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" +
+ "If the property is not set, then logging will be initialized using hive-log4j2.xml found on the classpath.\n" +
+ "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" +
"which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", "",
"Hive log4j configuration file for execution mode(sub command).\n" +
- "If the property is not set, then logging will be initialized using hive-exec-log4j.properties found on the classpath.\n" +
- "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" +
+ "If the property is not set, then logging will be initialized using hive-exec-log4j2.xml found on the classpath.\n" +
+ "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" +
"which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,
diff --git a/common/src/main/resources/hive-log4j.properties b/common/src/main/resources/hive-log4j.properties
deleted file mode 100644
index 14fa725..0000000
--- a/common/src/main/resources/hive-log4j.properties
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.log.threshold=ALL
-hive.root.logger=INFO,DRFA
-hive.log.dir=${java.io.tmpdir}/${user.name}
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=${hive.log.threshold}
-
-#
-# Daily Rolling File Appender
-#
-# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
-# for different CLI session.
-#
-# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
diff --git a/common/src/main/resources/hive-log4j2.xml b/common/src/main/resources/hive-log4j2.xml
new file mode 100644
index 0000000..bfecc7f
--- /dev/null
+++ b/common/src/main/resources/hive-log4j2.xml
@@ -0,0 +1,112 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">INFO</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
+    <Property name="hive.log.file">hive.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Daily Rolling File Appender -->
+    <!-- Use separate log files for different CLI sessions if needed -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n"/>
+      <Policies>
+        <!-- Rollover at midnight -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <DefaultRolloverStrategy max="30"/>
+    </RollingFile>
+
+    <!-- Event Counter Appender
+     Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter"/>
+    </Root>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
index d5cedb1..92269e7 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
@@ -21,12 +21,12 @@
import java.io.File;
import java.io.InputStreamReader;
-import junit.framework.TestCase;
-
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.common.util.HiveTestUtils;
+import junit.framework.TestCase;
+
/**
* TestHiveLogging
*
@@ -104,9 +104,9 @@ public void testHiveLogging() throws Exception {
// customized log4j config log file to be: /${test.tmp.dir}/TestHiveLogging/hiveLog4jTest.log
File customLogPath = new File(new File(System.getProperty("test.tmp.dir")),
System.getProperty("user.name") + "-TestHiveLogging/");
- String customLogName = "hiveLog4jTest.log";
+ String customLogName = "hiveLog4j2Test.log";
File customLogFile = new File(customLogPath, customLogName);
RunTest(customLogFile,
- "hive-log4j-test.properties", "hive-exec-log4j-test.properties");
+ "hive-log4j2-test.xml", "hive-exec-log4j2-test.xml");
}
}
diff --git a/common/src/test/resources/hive-exec-log4j-test.properties b/common/src/test/resources/hive-exec-log4j-test.properties
deleted file mode 100644
index 1e53f26..0000000
--- a/common/src/test/resources/hive-exec-log4j-test.properties
+++ /dev/null
@@ -1,59 +0,0 @@
-# Define some default values that can be overridden by system properties
-hive.root.logger=INFO,FA
-hive.log.dir=/${test.tmp.dir}/${user.name}-TestHiveLogging
-hive.log.file=hiveExecLog4jTest.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# File Appender
-#
-
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
-
-
-log4j.category.DataNucleus=ERROR,FA
-log4j.category.Datastore=ERROR,FA
-log4j.category.Datastore.Schema=ERROR,FA
-log4j.category.JPOX.Datastore=ERROR,FA
-log4j.category.JPOX.Plugin=ERROR,FA
-log4j.category.JPOX.MetaData=ERROR,FA
-log4j.category.JPOX.Query=ERROR,FA
-log4j.category.JPOX.General=ERROR,FA
-log4j.category.JPOX.Enhancer=ERROR,FA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA
diff --git a/common/src/test/resources/hive-exec-log4j2-test.xml b/common/src/test/resources/hive-exec-log4j2-test.xml
new file mode 100644
index 0000000..f64c64c
--- /dev/null
+++ b/common/src/test/resources/hive-exec-log4j2-test.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveExecLog4j2Test"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">INFO</Property>
+    <Property name="hive.root.logger">FA</Property>
+    <Property name="hive.log.dir">${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging</Property>
+    <Property name="hive.log.file">hiveExecLog4j2Test.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n"/>
+    </File>
+
+    <!-- Event Counter Appender
+     Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter"/>
+    </Root>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/common/src/test/resources/hive-log4j-test.properties b/common/src/test/resources/hive-log4j-test.properties
deleted file mode 100644
index 0348325..0000000
--- a/common/src/test/resources/hive-log4j-test.properties
+++ /dev/null
@@ -1,71 +0,0 @@
-# Define some default values that can be overridden by system properties
-hive.root.logger=WARN,DRFA
-hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging
-hive.log.file=hiveLog4jTest.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# Daily Rolling File Appender
-#
-# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
-# for different CLI session.
-#
-# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
diff --git a/common/src/test/resources/hive-log4j2-test.xml b/common/src/test/resources/hive-log4j2-test.xml
new file mode 100644
index 0000000..9b261ca
--- /dev/null
+++ b/common/src/test/resources/hive-log4j2-test.xml
@@ -0,0 +1,95 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2Test"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">WARN</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging</Property>
+    <Property name="hive.log.file">hiveLog4j2Test.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n"/>
+      <Policies>
+        <!-- Rollover at midnight -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <DefaultRolloverStrategy max="30"/>
+    </RollingFile>
+
+    <!-- Event Counter Appender
+     Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter"/>
+    </Root>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/data/conf/hive-log4j-old.properties b/data/conf/hive-log4j-old.properties
deleted file mode 100644
index f274b8c..0000000
--- a/data/conf/hive-log4j-old.properties
+++ /dev/null
@@ -1,82 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.root.logger=DEBUG,DRFA
-hive.log.dir=${build.dir.hive}/ql/tmp/
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
diff --git a/data/conf/hive-log4j.properties b/data/conf/hive-log4j.properties
deleted file mode 100644
index 023e3c2..0000000
--- a/data/conf/hive-log4j.properties
+++ /dev/null
@@ -1,97 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.root.logger=DEBUG,DRFA
-hive.log.dir=${test.tmp.dir}/log/
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=WARN
-
-#
-# Daily Rolling File Appender
-#
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job!
-# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy.
-# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html
-# Add "DAILY" to hive.root.logger above if you want to use this.
-log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender
-log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
-log4j.appender.DAILY.rollingPolicy.ActiveFileName=${hive.log.dir}/${hive.log.file}
-log4j.appender.DAILY.rollingPolicy.FileNamePattern=${hive.log.dir}/${hive.log.file}.%d{yyyy-MM-dd}
-log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout
-log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA
-log4j.logger.org.apache.zookeeper=INFO,DRFA
-log4j.logger.org.apache.zookeeper.server.ServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocket=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
-log4j.logger.org.apache.hadoop.hive.ql.log.PerfLogger=${hive.ql.log.PerfLogger.level}
-log4j.logger.org.apache.hadoop.hive.ql.exec.Operator=INFO,DRFA
-log4j.logger.org.apache.hadoop.hive.serde2.lazy=INFO,DRFA
-log4j.logger.org.apache.hadoop.hive.metastore.ObjectStore=INFO,DRFA
diff --git a/data/conf/hive-log4j2.xml b/data/conf/hive-log4j2.xml
new file mode 100644
index 0000000..59d6192
--- /dev/null
+++ b/data/conf/hive-log4j2.xml
@@ -0,0 +1,148 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">DEBUG</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:test.tmp.dir}/log</Property>
+    <Property name="hive.log.file">hive.log</Property>
+    <Property name="hive.ql.log.PerfLogger.level">INFO</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n"/>
+      <Policies>
+        <!-- Rollover at midnight -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <DefaultRolloverStrategy max="30"/>
+    </RollingFile>
+
+    <!-- Event Counter Appender
+     Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter"/>
+    </Root>
+
+    <Logger name="org.apache.hadoop.conf.Configuration" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.server.ServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocket" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.ql.log.PerfLogger" level="${sys:hive.ql.log.PerfLogger.level}">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.ql.exec.Operator" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.serde2.lazy" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.hive.metastore.ObjectStore" level="INFO">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/data/conf/spark/log4j.properties b/data/conf/spark/log4j.properties
deleted file mode 100644
index 8838c0e..0000000
--- a/data/conf/spark/log4j.properties
+++ /dev/null
@@ -1,24 +0,0 @@
-log4j.rootCategory=DEBUG, DRFA
-
-hive.spark.log.dir=target/tmp/log
-# Settings to quiet third party logs that are too verbose
-log4j.logger.org.eclipse.jetty=WARN
-log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR
-log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
-log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.spark.log.dir}/spark.log
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
diff --git a/data/conf/spark/log4j2.xml b/data/conf/spark/log4j2.xml
new file mode 100644
index 0000000..26314ad
--- /dev/null
+++ b/data/conf/spark/log4j2.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="SparkLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="spark.log.level">DEBUG</Property>
+    <Property name="spark.root.logger">DRFA</Property>
+    <Property name="hive.spark.log.dir">target/tmp/log</Property>
+    <Property name="spark.log.file">spark.log</Property>
+  </Properties>
+
+  <Appenders>
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="DRFA" fileName="${sys:hive.spark.log.dir}/${sys:spark.log.file}"
+     filePattern="${sys:hive.spark.log.dir}/${sys:spark.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n"/>
+      <Policies>
+        <!-- Rollover at midnight -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <DefaultRolloverStrategy max="30"/>
+    </RollingFile>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:spark.log.level}">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Root>
+
+    <!-- Settings to quiet third party logs that are too verbose -->
+    <Logger name="org.eclipse.jetty" level="WARN">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+    <Logger name="org.eclipse.jetty.util.component.AbstractLifeCycle" level="ERROR">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.spark.repl.SparkIMain$exprTyper" level="INFO">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.spark.repl.SparkILoop$SparkILoopInterpreter" level="INFO">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/docs/xdocs/language_manual/cli.xml b/docs/xdocs/language_manual/cli.xml
index a293382..eb91e44 100644
--- a/docs/xdocs/language_manual/cli.xml
+++ b/docs/xdocs/language_manual/cli.xml
@@ -163,7 +163,7 @@ Sample Usage:
-Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use hive-log4j.default in the conf/ directory of the hive installation which writes out logs to /tmp/$USER/hive.log and uses the WARN level.
+Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j2 configuration file. By default Hive will use hive-log4j2.xml in the conf/ directory of the Hive installation, which writes out logs to /tmp/$USER/hive.log and uses the WARN level.
It is often desirable to emit the logs to the standard output and/or change the logging level for debugging purposes. These can be done from the command line as follows:
diff --git a/hcatalog/bin/hcat_server.sh b/hcatalog/bin/hcat_server.sh
index 6b09d3e..d1aecb8 100644
--- a/hcatalog/bin/hcat_server.sh
+++ b/hcatalog/bin/hcat_server.sh
@@ -84,7 +84,7 @@ function start_hcat() {
export AUX_CLASSPATH=${AUX_CLASSPATH}
export HADOOP_HOME=$HADOOP_HOME
- #export HADOOP_OPTS="-Dlog4j.configuration=file://${HCAT_PREFIX}/conf/log4j.properties"
+ #export HADOOP_OPTS="-Dlog4j.configurationFile=file://${HCAT_PREFIX}/conf/log4j2.xml"
export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${HCAT_LOG_DIR}/hcat_err_pid%p.log -Xloggc:${HCAT_LOG_DIR}/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps"
export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-2048} # 8G is better if you have it
export METASTORE_PORT=${METASTORE_PORT:-9083}
diff --git a/hcatalog/bin/templeton.cmd b/hcatalog/bin/templeton.cmd
index e9a735d..759f654 100644
--- a/hcatalog/bin/templeton.cmd
+++ b/hcatalog/bin/templeton.cmd
@@ -59,9 +59,9 @@ setlocal enabledelayedexpansion
if not defined TEMPLETON_LOG4J (
@rem must be prefixed with file: otherwise config is not picked up
- set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j.properties
+ set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j2.xml
)
- set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configuration=%TEMPLETON_LOG4J% %HADOOP_OPTS%
+ set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configurationFile=%TEMPLETON_LOG4J% %HADOOP_OPTS%
set arguments=%JAVA_HEAP_MAX% %TEMPLETON_OPTS% -classpath %CLASSPATH% org.apache.hive.hcatalog.templeton.Main
if defined service_entry (
diff --git a/hcatalog/scripts/hcat_server_start.sh b/hcatalog/scripts/hcat_server_start.sh
index 1670b70..872d1b5 100755
--- a/hcatalog/scripts/hcat_server_start.sh
+++ b/hcatalog/scripts/hcat_server_start.sh
@@ -70,7 +70,7 @@ export AUX_CLASSPATH=${AUX_CLASSPATH}
export HADOOP_HOME=$HADOOP_HOME
-#export HADOOP_OPTS="-Dlog4j.configuration=file://${ROOT}/conf/log4j.properties"
+#export HADOOP_OPTS="-Dlog4j.configurationFile=file://${ROOT}/conf/log4j2.xml"
export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${ROOT}/var/log/hcat_err_pid%p.log -Xloggc:${ROOT}/var/log/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps"
export HADOOP_HEAPSIZE=2048 # 8G is better if you have it
diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
index 9757b9c..6385e40 100644
--- a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
+++ b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
@@ -66,7 +66,7 @@ ${env.PIG_HOME}/bin/pig
uncertainty.
Note: The location of the log files created by Templeton and some other properties
- of the logging system are set in the webhcat-log4j.properties file.
+ of the logging system are set in the webhcat-log4j2.xml file.
Variables
diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
index 16da248..e2953a9 100644
--- a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
+++ b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
@@ -241,7 +241,7 @@
Server activity logs are located in
root/var/log/hcat_server. Logging configuration is located at
- root/conf/log4j.properties. Server logging uses
+ root/conf/log4j2.xml. Server logging uses
DailyRollingFileAppender by default. It will generate a new
file per day and does not expire old log files automatically.
diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
deleted file mode 100644
index 82684b3..0000000
--- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.log.threshold=ALL
-hive.root.logger=DEBUG,DRFA
-hive.log.dir=/tmp/ekoifman
-hive.log.file=hive.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshold=${hive.log.threshold}
-
-#
-# Daily Rolling File Appender
-#
-# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files
-# for different CLI session.
-#
-# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
-
-log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-
-log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
-
-# 30-day backup
-#log4j.appender.DRFA.MaxBackupIndex=30
-log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
-log4j.appender.console.encoding=UTF-8
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,DRFA
-log4j.category.Datastore=ERROR,DRFA
-log4j.category.Datastore.Schema=ERROR,DRFA
-log4j.category.JPOX.Datastore=ERROR,DRFA
-log4j.category.JPOX.Plugin=ERROR,DRFA
-log4j.category.JPOX.MetaData=ERROR,DRFA
-log4j.category.JPOX.Query=ERROR,DRFA
-log4j.category.JPOX.General=ERROR,DRFA
-log4j.category.JPOX.Enhancer=ERROR,DRFA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA
diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml
new file mode 100644
index 0000000..de00e62
--- /dev/null
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml
@@ -0,0 +1,112 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">DEBUG</Property>
+    <Property name="hive.root.logger">DRFA</Property>
+    <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
+    <Property name="hive.log.file">hive.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Daily Rolling File Appender -->
+    <!-- Use separate log files for different CLI sessions if needed -->
+    <RollingFile name="DRFA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}"
+     filePattern="${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n"/>
+      <Policies>
+        <!-- Rollover at midnight -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+      <!-- 30-day backup -->
+      <DefaultRolloverStrategy max="30"/>
+    </RollingFile>
+
+    <!-- Event Counter Appender
+     Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter"/>
+    </Root>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
index 8cc9353..e59177c 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
@@ -31,7 +31,7 @@ cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.
#cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml ${HIVE_HOME}/conf/hive-site.xml
cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml
-cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties ${HIVE_HOME}/conf/hive-log4j.properties
+cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml ${HIVE_HOME}/conf/hive-log4j2.xml
if [ -f ${MYSQL_CLIENT_JAR} ]; then
cp ${MYSQL_CLIENT_JAR} ${HIVE_HOME}/lib
diff --git a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
index 0be8dde..c80fdd5 100644
--- a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
+++ b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh
@@ -215,11 +215,11 @@ else
fi
if [[ -z "$WEBHCAT_LOG4J" ]]; then
- WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j.properties";
+ WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j2.xml";
fi
export HADOOP_USER_CLASSPATH_FIRST=true
-export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configuration=$WEBHCAT_LOG4J"
+export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configurationFile=$WEBHCAT_LOG4J"
start_cmd="$HADOOP_PREFIX/bin/hadoop jar $JAR org.apache.hive.hcatalog.templeton.Main "
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties
deleted file mode 100644
index 866052c..0000000
--- a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties
+++ /dev/null
@@ -1,45 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Define some default values that can be overridden by system properties
-webhcat.root.logger = INFO, standard
-webhcat.log.dir = .
-webhcat.log.file = webhcat.log
-
-log4j.rootLogger = ${webhcat.root.logger}
-
-# Logging Threshold
-log4j.threshhold = DEBUG
-
-log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender
-log4j.appender.standard.File = ${webhcat.log.dir}/${webhcat.log.file}
-
-# Rollver at midnight
-log4j.appender.DRFA.DatePattern = .yyyy-MM-dd
-
-log4j.appender.DRFA.layout = org.apache.log4j.PatternLayout
-
-log4j.appender.standard.layout = org.apache.log4j.PatternLayout
-log4j.appender.standard.layout.conversionPattern = %-5p | %d{DATE} | %c | %m%n
-
-# Class logging settings
-log4j.logger.com.sun.jersey = DEBUG
-log4j.logger.com.sun.jersey.spi.container.servlet.WebComponent = ERROR
-log4j.logger.org.apache.hadoop = INFO
-log4j.logger.org.apache.hadoop.conf = WARN
-log4j.logger.org.apache.zookeeper = WARN
-log4j.logger.org.eclipse.jetty = INFO
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml
new file mode 100644
index 0000000..13e02a0
--- /dev/null
+++ b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements.  See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership.  The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="WebhcatLog4j2"
+ packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="webhcat.log.level">INFO</Property>
+    <Property name="webhcat.root.logger">standard</Property>
+    <Property name="webhcat.log.dir">.</Property>
+    <Property name="webhcat.log.file">webhcat.log</Property>
+  </Properties>
+
+  <Appenders>
+    <!-- Daily Rolling File Appender -->
+    <RollingFile name="standard" fileName="${sys:webhcat.log.dir}/${sys:webhcat.log.file}"
+     filePattern="${sys:webhcat.log.dir}/${sys:webhcat.log.file}.%d{yyyy-MM-dd}">
+      <PatternLayout pattern="%-5p | %d{DATE} | %c | %m%n"/>
+      <Policies>
+        <!-- Rollover at midnight -->
+        <TimeBasedTriggeringPolicy interval="1" modulate="true"/>
+      </Policies>
+    </RollingFile>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:webhcat.log.level}">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Root>
+
+    <!-- Class logging settings -->
+    <Logger name="com.sun.jersey" level="DEBUG">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="com.sun.jersey.spi.container.servlet.WebComponent" level="ERROR">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop" level="INFO">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hadoop.conf" level="WARN">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper" level="WARN">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+    <Logger name="org.eclipse.jetty" level="INFO">
+      <AppenderRef ref="${sys:webhcat.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/itests/pom.xml b/itests/pom.xml
index f156cc4..acce713 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -94,7 +94,7 @@
}
mkdir -p $DOWNLOAD_DIR
download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz" "spark"
- cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/
+ cp -f $HIVE_ROOT/data/conf/spark/log4j2.xml $BASE_DIR/spark/conf/
sed '/package /d' ${basedir}/${hive.path.to.root}/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java > /tmp/UDFExampleAdd.java
javac -cp ${settings.localRepository}/org/apache/hive/hive-exec/${project.version}/hive-exec-${project.version}.jar /tmp/UDFExampleAdd.java -d /tmp
jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index ee2cea0..5c60491 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -5913,7 +5913,7 @@ public static void main(String[] args) throws Throwable {
// If the log4j.configuration property hasn't already been explicitly set,
// use Hive's default log4j configuration
- if (System.getProperty("log4j.configuration") == null) {
+ if (System.getProperty("log4j.configurationFile") == null) {
// NOTE: It is critical to do this here so that log4j is reinitialized
// before any of the other core hive classes are loaded
try {
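The guard above keeps its old shape; only the property name changes, since Log4j 2 reads log4j.configurationFile instead of log4j.configuration. Condensed into a runnable sketch (assuming hive-common on the classpath; class name is illustrative):

import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;

public class MetastoreLogInit {
  public static void main(String[] args) throws LogInitializationException {
    // Install Hive's default Log4j 2 config only when the user has not
    // already pointed Log4j 2 at an explicit configuration file.
    if (System.getProperty("log4j.configurationFile") == null) {
      LogUtils.initHiveLog4j();
    }
  }
}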
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
index ad99427..df42f1a 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
@@ -17,15 +17,11 @@
*/
package org.apache.hadoop.hive.metastore.txn;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+import static junit.framework.Assert.fail;
import java.util.ArrayList;
import java.util.Arrays;
@@ -34,7 +30,29 @@
import java.util.SortedSet;
import java.util.TreeSet;
-import static junit.framework.Assert.*;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
+import org.apache.hadoop.hive.metastore.api.AddDynamicPartitions;
+import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.LockComponent;
+import org.apache.hadoop.hive.metastore.api.LockLevel;
+import org.apache.hadoop.hive.metastore.api.LockRequest;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.LockState;
+import org.apache.hadoop.hive.metastore.api.LockType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
+import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;
+import org.apache.hadoop.hive.metastore.api.UnlockRequest;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
/**
* Tests for TxnHandler.
@@ -43,11 +61,9 @@
private HiveConf conf = new HiveConf();
private CompactionTxnHandler txnHandler;
- static final private Log LOG = LogFactory.getLog(TestCompactionTxnHandler.class);
public TestCompactionTxnHandler() throws Exception {
TxnDbUtil.setConfValues(conf);
- LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG);
tearDown();
}
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
index f478184..6dc0bd3 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java
@@ -17,16 +17,11 @@
*/
package org.apache.hadoop.hive.metastore.txn;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertFalse;
+import static junit.framework.Assert.assertNull;
+import static junit.framework.Assert.assertTrue;
+import static junit.framework.Assert.fail;
import java.sql.Connection;
import java.sql.SQLException;
@@ -36,21 +31,66 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
-import static junit.framework.Assert.*;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
+import org.apache.hadoop.hive.metastore.api.CheckLockRequest;
+import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionRequest;
+import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
+import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.HeartbeatRequest;
+import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeRequest;
+import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
+import org.apache.hadoop.hive.metastore.api.LockComponent;
+import org.apache.hadoop.hive.metastore.api.LockLevel;
+import org.apache.hadoop.hive.metastore.api.LockRequest;
+import org.apache.hadoop.hive.metastore.api.LockResponse;
+import org.apache.hadoop.hive.metastore.api.LockState;
+import org.apache.hadoop.hive.metastore.api.LockType;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchLockException;
+import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
+import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
+import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
+import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;
+import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponse;
+import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;
+import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
+import org.apache.hadoop.hive.metastore.api.TxnInfo;
+import org.apache.hadoop.hive.metastore.api.TxnOpenException;
+import org.apache.hadoop.hive.metastore.api.TxnState;
+import org.apache.hadoop.hive.metastore.api.UnlockRequest;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Tests for TxnHandler.
*/
public class TestTxnHandler {
static final private String CLASS_NAME = TxnHandler.class.getName();
- static final private Log LOG = LogFactory.getLog(CLASS_NAME);
+ private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
private HiveConf conf = new HiveConf();
private TxnHandler txnHandler;
public TestTxnHandler() throws Exception {
TxnDbUtil.setConfValues(conf);
- LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG);
+ LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ Configuration conf = ctx.getConfiguration();
+ conf.getLoggerConfig(CLASS_NAME).setLevel(Level.DEBUG);
+ ctx.updateLoggers(conf);
tearDown();
}
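Log4j 2 has no setLevel on the logger itself, which is why the constructor above goes through the LoggerContext and its LoggerConfig. The same pattern pulled out into a reusable helper, as a sketch assuming log4j-core on the classpath:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;

public final class LogLevels {
  private LogLevels() {}

  public static void setLevel(String loggerName, Level level) {
    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    Configuration config = ctx.getConfiguration();
    // Note: getLoggerConfig returns the closest ancestor's config when no
    // config exists for this exact name.
    LoggerConfig loggerConfig = config.getLoggerConfig(loggerName);
    loggerConfig.setLevel(level);
    ctx.updateLoggers(config); // push the change to live loggers
  }
}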
diff --git a/packaging/src/main/assembly/bin.xml b/packaging/src/main/assembly/bin.xml
index a1c176f..9fe9282 100644
--- a/packaging/src/main/assembly/bin.xml
+++ b/packaging/src/main/assembly/bin.xml
@@ -274,7 +274,7 @@
       <fileMode>644</fileMode>
       <includes>
         <include>webhcat-default.xml</include>
-        <include>webhcat-log4j.properties</include>
+        <include>webhcat-log4j2.xml</include>
       </includes>
       <filtered>true</filtered>
       <outputDirectory>hcatalog/etc/webhcat</outputDirectory>
@@ -322,19 +322,19 @@
     <file>
-      <source>${project.parent.basedir}/common/src/main/resources/hive-log4j.properties</source>
+      <source>${project.parent.basedir}/common/src/main/resources/hive-log4j2.xml</source>
       <outputDirectory>conf</outputDirectory>
-      <destName>hive-log4j.properties.template</destName>
+      <destName>hive-log4j2.xml.template</destName>
     </file>
     <file>
-      <source>${project.parent.basedir}/ql/src/main/resources/hive-exec-log4j.properties</source>
+      <source>${project.parent.basedir}/ql/src/main/resources/hive-exec-log4j2.xml</source>
       <outputDirectory>conf</outputDirectory>
-      <destName>hive-exec-log4j.properties.template</destName>
+      <destName>hive-exec-log4j2.xml.template</destName>
     </file>
     <file>
-      <source>${project.parent.basedir}/beeline/src/main/resources/beeline-log4j.properties</source>
+      <source>${project.parent.basedir}/beeline/src/main/resources/beeline-log4j2.xml</source>
       <outputDirectory>conf</outputDirectory>
-      <destName>beeline-log4j.properties.template</destName>
+      <destName>beeline-log4j2.xml.template</destName>
     </file>
     <file>
       <source>${project.parent.basedir}/hcatalog/README.txt</source>
diff --git a/pom.xml b/pom.xml
index 1abf738..dac7f25 100644
--- a/pom.xml
+++ b/pom.xml
@@ -147,8 +147,7 @@
     <kryo.version>2.22</kryo.version>
     <libfb303.version>0.9.2</libfb303.version>
     <libthrift.version>0.9.2</libthrift.version>
-    <log4j.version>1.2.16</log4j.version>
-    <log4j-extras.version>1.2.17</log4j-extras.version>
+    <log4j2.version>2.3</log4j2.version>
     <opencsv.version>2.3</opencsv.version>
     <mockito-all.version>1.9.5</mockito-all.version>
     <mina.version>2.0.0-M5</mina.version>
@@ -355,14 +354,19 @@
         <version>${junit.version}</version>
       </dependency>
       <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <version>${log4j.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-1.2-api</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>apache-log4j-extras</artifactId>
-        <version>${log4j-extras.version}</version>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-slf4j-impl</artifactId>
+        <version>${log4j2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.logging.log4j</groupId>
+        <artifactId>log4j-jcl</artifactId>
+        <version>${log4j2.version}</version>
       </dependency>
       <dependency>
         <groupId>org.antlr</groupId>
@@ -861,7 +865,7 @@
           <maven.local.repository>${maven.repo.local}</maven.local.repository>
           <mapred.job.tracker>local</mapred.job.tracker>
-          <log4j.configuration>${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j.properties</log4j.configuration>
+          <log4j.configurationFile>${test.log4j.scheme}${test.tmp.dir}/conf/hive-log4j2.xml</log4j.configurationFile>
           <log4j.debug>true</log4j.debug>
           <test.tmp.dir>${test.tmp.dir}</test.tmp.dir>
diff --git a/ql/pom.xml b/ql/pom.xml
index 6026c49..6e586c4 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -112,14 +112,34 @@
       <version>${javolution.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <version>${log4j.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>apache-log4j-extras</artifactId>
-      <version>${log4j-extras.version}</version>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-1.2-api</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-slf4j-impl</artifactId>
+      <version>${log4j2.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.logging.log4j</groupId>
+      <artifactId>log4j-jcl</artifactId>
+      <version>${log4j2.version}</version>
     </dependency>
     <dependency>
       <groupId>org.antlr</groupId>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index a2cf712..82345ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -27,7 +27,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
@@ -57,13 +56,14 @@
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
+import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
+import org.apache.hadoop.hive.ql.log.NullAppender;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -88,11 +88,12 @@
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Appender;
-import org.apache.log4j.BasicConfigurator;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.varia.NullAppender;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.appender.FileAppender;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
/**
* ExecDriver is the central class in co-ordinating execution of any map-reduce task.
@@ -687,8 +688,10 @@ public static void main(String[] args) throws IOException, HiveException {
if (noLog) {
// If started from main(), and noLog is on, we should not output
// any logs. To turn the log on, please set -Dtest.silent=false
- BasicConfigurator.resetConfiguration();
- BasicConfigurator.configure(new NullAppender());
+ Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
+ NullAppender appender = NullAppender.createNullAppender();
+ appender.addToLogger(logger.getName(), Level.ERROR);
+ appender.start();
} else {
setupChildLog4j(conf);
}
@@ -703,10 +706,12 @@ public static void main(String[] args) throws IOException, HiveException {
// print out the location of the log file for the user so
// that it's easy to find reason for local mode execution failures
- for (Appender appender : Collections.list((Enumeration) LogManager.getRootLogger()
- .getAllAppenders())) {
+ for (Appender appender : ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger())
+ .getAppenders().values()) {
if (appender instanceof FileAppender) {
- console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
+ console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
+ } else if (appender instanceof RollingFileAppender) {
+ console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
}
}
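In Log4j 2, appenders hang off the core Logger implementation (the API-level Logger must be cast), and getAppenders() returns a Map rather than the old Enumeration. The loop above as a standalone sketch:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.appender.FileAppender;
import org.apache.logging.log4j.core.appender.RollingFileAppender;

public class LogFileFinder {
  public static void main(String[] args) {
    org.apache.logging.log4j.core.Logger root =
        (org.apache.logging.log4j.core.Logger) LogManager.getRootLogger();
    for (Appender appender : root.getAppenders().values()) {
      // Both file-backed appender types expose the current file path.
      if (appender instanceof FileAppender) {
        System.out.println(((FileAppender) appender).getFileName());
      } else if (appender instanceof RollingFileAppender) {
        System.out.println(((RollingFileAppender) appender).getFileName());
      }
    }
  }
}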
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java
new file mode 100644
index 0000000..1014986
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/HiveEventCounter.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * A Log4j 2 appender that simply counts logging events at four levels:
+ * fatal, error, warn and info. The plugin name is what log4j2.xml uses
+ * to reference this appender.
+ */
+@Plugin(name = "HiveEventCounter", category = "Core", elementType = "appender", printObject = true)
+public class HiveEventCounter extends AbstractAppender {
+ private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ private static Configuration configuration = context.getConfiguration();
+ private static final String APPENDER_NAME = "HiveEventCounter";
+ private static final int FATAL = 0;
+ private static final int ERROR = 1;
+ private static final int WARN = 2;
+ private static final int INFO = 3;
+
+ private static class EventCounts {
+ private final long[] counts = {0, 0, 0, 0};
+
+ private synchronized void incr(int i) {
+ ++counts[i];
+ }
+
+ private synchronized long get(int i) {
+ return counts[i];
+ }
+ }
+
+ private static EventCounts counts = new EventCounts();
+
+ protected HiveEventCounter(String name, Filter filter,
+ Layout<? extends Serializable> layout) {
+ super(name, filter, layout);
+ }
+
+ public static HiveEventCounter createInstance() {
+ return new HiveEventCounter(APPENDER_NAME, null, PatternLayout.createDefaultLayout());
+ }
+
+ @InterfaceAudience.Private
+ public static long getFatal() {
+ return counts.get(FATAL);
+ }
+
+ @InterfaceAudience.Private
+ public static long getError() {
+ return counts.get(ERROR);
+ }
+
+ @InterfaceAudience.Private
+ public static long getWarn() {
+ return counts.get(WARN);
+ }
+
+ @InterfaceAudience.Private
+ public static long getInfo() {
+ return counts.get(INFO);
+ }
+
+ @VisibleForTesting
+ public void addToLogger(String loggerName, Level level) {
+ LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+ loggerConfig.addAppender(this, level, null);
+ context.updateLoggers();
+ }
+
+ @VisibleForTesting
+ public void removeFromLogger(String loggerName) {
+ LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+ loggerConfig.removeAppender(APPENDER_NAME);
+ context.updateLoggers();
+ }
+
+ public void append(LogEvent event) {
+ Level level = event.getLevel();
+ if (level.equals(Level.INFO)) {
+ counts.incr(INFO);
+ } else if (level.equals(Level.WARN)) {
+ counts.incr(WARN);
+ } else if (level.equals(Level.ERROR)) {
+ counts.incr(ERROR);
+ } else if (level.equals(Level.FATAL)) {
+ counts.incr(FATAL);
+ }
+ }
+}
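
For reference, a short sketch of how this appender can be exercised programmatically; it mirrors the test added later in this patch, and in production the appender is instead referenced by its plugin name from log4j2.xml:

    import org.apache.hadoop.hive.ql.log.HiveEventCounter;
    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class EventCounterDemo {
      public static void main(String[] args) {
        Logger root = LogManager.getRootLogger();
        HiveEventCounter counter = HiveEventCounter.createInstance();
        // Attach to the root logger's config at WARN, then start receiving events.
        counter.addToLogger(root.getName(), Level.WARN);
        counter.start();

        root.warn("disk space is low");
        root.error("task failed");

        System.out.println("warns=" + HiveEventCounter.getWarn()
            + " errors=" + HiveEventCounter.getError());
        counter.removeFromLogger(root.getName());
      }
    }
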
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java
new file mode 100644
index 0000000..7d0f4f5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/NullAppender.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.io.Serializable;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+/**
+ * A NullAppender merely exists; it never outputs a message to any device.
+ */
+@Plugin(name = "NullAppender", category = "Core", elementType = "appender", printObject = false)
+public class NullAppender extends AbstractAppender {
+
+ private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ private static Configuration configuration = context.getConfiguration();
+
+ protected NullAppender(String name, Filter filter,
+ Layout<? extends Serializable> layout, boolean ignoreExceptions) {
+ super(name, filter, layout, ignoreExceptions);
+ }
+
+ public static NullAppender createNullAppender() {
+ return new NullAppender("NullAppender", null, PatternLayout.createDefaultLayout(), true);
+ }
+
+ public void addToLogger(String loggerName, Level level) {
+ LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+ loggerConfig.addAppender(this, level, null);
+ context.updateLoggers();
+ }
+
+ public void append(LogEvent event) {
+ // no-op
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java
deleted file mode 100644
index 6a59d4a..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/log/PidDailyRollingFileAppender.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.log;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.RuntimeMXBean;
-
-import org.apache.log4j.DailyRollingFileAppender;
-
-public class PidDailyRollingFileAppender extends DailyRollingFileAppender {
-
- @Override
- public void setFile(String file) {
- RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean();
- super.setFile(file + '.' + rt.getName());
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
new file mode 100644
index 0000000..898e056
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/PidFilePatternConverter.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.lang.management.ManagementFactory;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.pattern.AbstractPatternConverter;
+import org.apache.logging.log4j.core.pattern.ArrayPatternConverter;
+import org.apache.logging.log4j.core.pattern.ConverterKeys;
+
+/**
+ * FilePattern converter that converts the %pid pattern to <process-id>@<hostname>
+ * information obtained at runtime.
+ *
+ * Example usage:
+ * <RollingFile name="Rolling-default" fileName="test.log"
+ *              filePattern="test.log.%pid.gz"/>
+ *
+ * This will generate an output file whose name contains <process-id>@<hostname>, e.g.
+ * test.log.95232@localhost.gz
+ */
+@Plugin(name = "PidFilePatternConverter", category = "FileConverter")
+@ConverterKeys({ "pid" })
+public class PidFilePatternConverter extends AbstractPatternConverter implements
+ ArrayPatternConverter {
+
+ /**
+ * Private constructor.
+ */
+ private PidFilePatternConverter() {
+ super("pid", "pid");
+ }
+
+ public static PidFilePatternConverter newInstance() {
+ return new PidFilePatternConverter();
+ }
+
+ public void format(StringBuilder toAppendTo, Object... objects) {
+ toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName());
+ }
+
+ public void format(Object obj, StringBuilder toAppendTo) {
+ toAppendTo.append(ManagementFactory.getRuntimeMXBean().getName());
+ }
+}
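
To make the %pid behavior concrete: the converter simply appends the JVM's RuntimeMXBean name, which on common JVMs has the form pid@hostname (the exact format is not guaranteed by the JVM spec). A tiny illustration using the same call the converter uses:

    import java.lang.management.ManagementFactory;

    public class PidDemo {
      public static void main(String[] args) {
        // Typically prints something like "95232@localhost", so a
        // filePattern of "test.log.%pid.gz" yields "test.log.95232@localhost.gz".
        System.out.println(ManagementFactory.getRuntimeMXBean().getName());
      }
    }
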
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/StringAppender.java b/ql/src/java/org/apache/hadoop/hive/ql/log/StringAppender.java
new file mode 100644
index 0000000..551f0ca
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/StringAppender.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Serializable;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender;
+import org.apache.logging.log4j.core.appender.OutputStreamManager;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * A Log4j2 appender that writes to an in-memory string object.
+ */
+@Plugin(name = "StringAppender", category = "Core", elementType = "appender", printObject = true)
+public class StringAppender
+ extends AbstractOutputStreamAppender<StringAppender.StringOutputStreamManager> {
+
+ private static final String APPENDER_NAME = "StringAppender";
+ private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ private static Configuration configuration = context.getConfiguration();
+ private StringOutputStreamManager manager;
+
+ /**
+ * Instantiate a StringAppender and set the output destination to a
+ * new {@link OutputStreamWriter} initialized with the manager's
+ * {@link OutputStream}.
+ *
+ * @param name The name of the Appender.
+ * @param layout The layout to format the message.
+ * @param filter The filter, if any, to use.
+ * @param ignoreExceptions If true, exceptions from appending are logged and suppressed.
+ * @param immediateFlush If true, the stream is flushed at the end of each append.
+ * @param manager The OutputStreamManager.
+ */
+ protected StringAppender(String name,
+ Layout<? extends Serializable> layout, Filter filter,
+ boolean ignoreExceptions, boolean immediateFlush,
+ StringOutputStreamManager manager) {
+ super(name, layout, filter, ignoreExceptions, immediateFlush, manager);
+ this.manager = manager;
+ }
+
+ public static StringAppender createStringAppender(
+ @PluginAttribute("name") String nullablePatternString) {
+ PatternLayout layout;
+ if (nullablePatternString == null) {
+ layout = PatternLayout.createDefaultLayout();
+ } else {
+ layout = PatternLayout.createLayout(nullablePatternString, configuration,
+ null, null, true, false, null, null);
+ }
+
+ return new StringAppender(APPENDER_NAME, layout, null, false, true,
+ new StringOutputStreamManager(new ByteArrayOutputStream(), "StringStream", layout));
+ }
+
+ @VisibleForTesting
+ public void addToLogger(String loggerName, Level level) {
+ LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+ loggerConfig.addAppender(this, level, null);
+ context.updateLoggers();
+ }
+
+ @VisibleForTesting
+ public void removeFromLogger(String loggerName) {
+ LoggerConfig loggerConfig = configuration.getLoggerConfig(loggerName);
+ loggerConfig.removeAppender(APPENDER_NAME);
+ context.updateLoggers();
+ }
+
+ public String getOutput() {
+ manager.flush();
+ return new String(manager.getStream().toByteArray());
+ }
+
+ public void reset() {
+ manager.reset();
+ }
+
+ protected static class StringOutputStreamManager extends OutputStreamManager {
+ ByteArrayOutputStream stream;
+
+ protected StringOutputStreamManager(ByteArrayOutputStream os, String streamName,
+ Layout<?> layout) {
+ super(os, streamName, layout);
+ stream = os;
+ }
+
+ public ByteArrayOutputStream getStream() {
+ return stream;
+ }
+
+ public void reset() {
+ stream.reset();
+ }
+ }
+}
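
A quick sketch of the capture/reset cycle this appender provides (same usage the new tests below rely on; note that the root logger's level must permit INFO, which the test sets up explicitly in its @Before method):

    import org.apache.hadoop.hive.ql.log.StringAppender;
    import org.apache.logging.log4j.Level;
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class CaptureDemo {
      public static void main(String[] args) {
        Logger root = LogManager.getRootLogger();
        StringAppender capture = StringAppender.createStringAppender("%m");
        capture.addToLogger(root.getName(), Level.INFO);
        capture.start();

        root.info("hello");
        System.out.println(capture.getOutput()); // "hello" (flushes the stream first)
        capture.reset();                         // clears the in-memory buffer

        capture.removeFromLogger(root.getName());
      }
    }
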
diff --git a/ql/src/main/resources/hive-exec-log4j.properties b/ql/src/main/resources/hive-exec-log4j.properties
deleted file mode 100644
index 9eaa6b6..0000000
--- a/ql/src/main/resources/hive-exec-log4j.properties
+++ /dev/null
@@ -1,77 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Define some default values that can be overridden by system properties
-hive.log.threshold=ALL
-hive.root.logger=INFO,FA
-hive.log.dir=${java.io.tmpdir}/${user.name}
-hive.query.id=hadoop
-hive.log.file=${hive.query.id}.log
-
-# Define the root logger to the system property "hadoop.root.logger".
-log4j.rootLogger=${hive.root.logger}, EventCounter
-
-# Logging Threshold
-log4j.threshhold=${hive.log.threshold}
-
-#
-# File Appender
-#
-
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.File=${hive.log.dir}/${hive.log.file}
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-
-# Pattern format: Date LogLevel LoggerName LogMessage
-#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-# Debugging Pattern format
-log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
-
-
-#
-# console
-# Add "console" to rootlogger above if you want to use this
-#
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t] %p %c{2}: %m%n
-
-#custom logging levels
-#log4j.logger.xxx=DEBUG
-
-#
-# Event Counter Appender
-# Sends counts of logging messages at different severity levels to Hadoop Metrics.
-#
-log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
-
-
-log4j.category.DataNucleus=ERROR,FA
-log4j.category.Datastore=ERROR,FA
-log4j.category.Datastore.Schema=ERROR,FA
-log4j.category.JPOX.Datastore=ERROR,FA
-log4j.category.JPOX.Plugin=ERROR,FA
-log4j.category.JPOX.MetaData=ERROR,FA
-log4j.category.JPOX.Query=ERROR,FA
-log4j.category.JPOX.General=ERROR,FA
-log4j.category.JPOX.Enhancer=ERROR,FA
-
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA
diff --git a/ql/src/main/resources/hive-exec-log4j2.xml b/ql/src/main/resources/hive-exec-log4j2.xml
new file mode 100644
index 0000000..7fc7183
--- /dev/null
+++ b/ql/src/main/resources/hive-exec-log4j2.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="HiveExecLog4j2"
+               packages="org.apache.hadoop.hive.ql.log">
+
+  <Properties>
+    <Property name="hive.log.threshold">ALL</Property>
+    <Property name="hive.log.level">INFO</Property>
+    <Property name="hive.root.logger">FA</Property>
+    <Property name="hive.log.dir">${sys:java.io.tmpdir}/${sys:user.name}</Property>
+    <Property name="hive.query.id">hadoop</Property>
+    <Property name="hive.log.file">${hive.query.id}.log</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} [%t] %p %c{2}: %m%n"/>
+    </Console>
+
+    <!-- Regular File Appender -->
+    <File name="FA" fileName="${sys:hive.log.dir}/${sys:hive.log.file}">
+      <PatternLayout pattern="%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n"/>
+    </File>
+
+    <!-- Sends counts of logging messages at different severity levels to Hadoop Metrics. -->
+    <HiveEventCounter name="EventCounter"/>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:hive.log.threshold}">
+      <AppenderRef ref="${sys:hive.root.logger}" level="${sys:hive.log.level}"/>
+      <AppenderRef ref="EventCounter"/>
+    </Root>
+
+    <Logger name="DataNucleus" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="Datastore.Schema" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Datastore" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Plugin" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.MetaData" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Query" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.General" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="JPOX.Enhancer" level="ERROR">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:hive.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java
new file mode 100644
index 0000000..2f9c3d3
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/log/TestLog4j2Appenders.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.log;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Tests for the custom Log4j2 appenders added in this patch (StringAppender, HiveEventCounter).
+ */
+public class TestLog4j2Appenders {
+
+ @Before
+ public void setup() {
// Programmatically set the root logger level to INFO. By default, if log4j2-test.xml
// is not available, the root logger falls back to the ERROR level
+ LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ Configuration config = ctx.getConfiguration();
+ LoggerConfig loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
+ loggerConfig.setLevel(Level.INFO);
+ ctx.updateLoggers();
+ }
+
+ @Test
+ public void testStringAppender() throws Exception {
+ // Get the RootLogger which, if you don't have log4j2-test.xml defined, will only log ERRORs
+ Logger logger = LogManager.getRootLogger();
+ // Create a String Appender to capture log output
+ StringAppender appender = StringAppender.createStringAppender("%m");
+ appender.addToLogger(logger.getName(), Level.INFO);
+ appender.start();
+
+ // Log to the string appender
+ logger.info("Hello!");
+ logger.info(" World");
+
+ assertEquals("Hello! World", appender.getOutput());
+ appender.removeFromLogger(LogManager.getRootLogger().getName());
+ }
+
+ @Test
+ public void testHiveEventCounterAppender() throws Exception {
+ Logger logger = LogManager.getRootLogger();
+ HiveEventCounter appender = HiveEventCounter.createInstance();
+ appender.addToLogger(logger.getName(), Level.INFO);
+ appender.start();
+
+ logger.info("Test");
+ logger.info("Test");
+ logger.info("Test");
+ logger.info("Test");
+
+ logger.error("Test");
+ logger.error("Test");
+ logger.error("Test");
+
+ logger.warn("Test");
+ logger.warn("Test");
+
+ logger.fatal("Test");
+ assertEquals(4, appender.getInfo());
+ assertEquals(3, appender.getError());
+ assertEquals(2, appender.getWarn());
+ assertEquals(1, appender.getFatal());
+ appender.removeFromLogger(LogManager.getRootLogger().getName());
+ }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index 99fbd5d..8c51946 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -20,7 +20,6 @@
import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -29,8 +28,6 @@
import java.util.Map;
import java.util.regex.Pattern;
-import junit.framework.TestCase;
-
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -46,6 +43,7 @@
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.ql.index.HiveIndex;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.log.StringAppender;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -56,15 +54,19 @@
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-import org.apache.log4j.WriterAppender;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.junit.Assert;
import com.google.common.collect.ImmutableMap;
+import junit.framework.TestCase;
+
/**
* TestHive.
*
@@ -248,36 +250,39 @@ public void testThriftTable() throws Throwable {
* @throws Throwable
*/
public void testMetaStoreApiTiming() throws Throwable {
- // set log level to DEBUG, as this is logged at debug level
- Logger logger = Logger.getLogger("hive.ql.metadata.Hive");
- Level origLevel = logger.getLevel();
- logger.setLevel(Level.DEBUG);
-
- // create an appender to capture the logs in a string
- StringWriter writer = new StringWriter();
- WriterAppender appender = new WriterAppender(new PatternLayout(), writer);
+ // The metastore API timing is logged at DEBUG, so raise this logger's level accordingly
+ Logger logger = LogManager.getLogger("hive.ql.metadata.Hive");
+ Level oldLevel = logger.getLevel();
+ LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ Configuration config = ctx.getConfiguration();
+ LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
+ loggerConfig.setLevel(Level.DEBUG);
+ ctx.updateLoggers();
+
+ // Create a String Appender to capture log output
+ StringAppender appender = StringAppender.createStringAppender("%m");
+ appender.addToLogger(logger.getName(), Level.DEBUG);
+ appender.start();
try {
- logger.addAppender(appender);
-
hm.clearMetaCallTiming();
hm.getAllDatabases();
hm.dumpAndClearMetaCallTiming("test");
- String logStr = writer.toString();
+ String logStr = appender.getOutput();
String expectedString = "getAllDatabases_()=";
Assert.assertTrue(logStr + " should contain <" + expectedString,
logStr.contains(expectedString));
// reset the log buffer, verify new dump without any api call does not contain func
- writer.getBuffer().setLength(0);
+ appender.reset();
hm.dumpAndClearMetaCallTiming("test");
- logStr = writer.toString();
+ logStr = appender.getOutput();
Assert.assertFalse(logStr + " should not contain <" + expectedString,
logStr.contains(expectedString));
-
} finally {
- logger.setLevel(origLevel);
- logger.removeAppender(appender);
+ loggerConfig.setLevel(oldLevel);
+ ctx.updateLoggers();
+ appender.removeFromLogger(logger.getName());
}
}
diff --git a/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java b/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
index 9d64b10..5ba8c16 100644
--- a/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
+++ b/service/src/java/org/apache/hive/service/cli/CLIServiceUtils.java
@@ -18,8 +18,13 @@
package org.apache.hive.service.cli;
-import org.apache.log4j.Layout;
-import org.apache.log4j.PatternLayout;
+import java.io.Serializable;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.layout.PatternLayout;
/**
* CLIServiceUtils.
@@ -29,10 +34,12 @@
private static final char SEARCH_STRING_ESCAPE = '\\';
- public static final Layout verboseLayout = new PatternLayout(
- "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n");
- public static final Layout nonVerboseLayout = new PatternLayout(
- "%-5p : %m%n");
+ private static LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ private static Configuration configuration = context.getConfiguration();
+ public static final Layout<? extends Serializable> verboseLayout = PatternLayout.createLayout(
+ "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n", configuration, null, null, true, false, null, null);
+ public static final Layout<? extends Serializable> nonVerboseLayout = PatternLayout.createLayout(
+ "%-5p : %m%n", configuration, null, null, true, false, null, null);
/**
* Convert a SQL search pattern into an equivalent Java Regex.
diff --git a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
index 70340bd..f8e5760 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java
@@ -6,44 +6,72 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package org.apache.hive.service.cli.operation;
-import java.io.CharArrayWriter;
-import java.util.Enumeration;
+
+import java.io.ByteArrayOutputStream;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Serializable;
import java.util.regex.Pattern;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.session.OperationLog;
-import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
import org.apache.hive.service.cli.CLIServiceUtils;
-import org.apache.log4j.Appender;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.WriterAppender;
-import org.apache.log4j.spi.Filter;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractOutputStreamAppender;
+import org.apache.logging.log4j.core.appender.ConsoleAppender;
+import org.apache.logging.log4j.core.appender.OutputStreamManager;
+import org.apache.logging.log4j.core.filter.AbstractFilter;
import com.google.common.base.Joiner;
/**
- * An Appender to divert logs from individual threads to the LogObject they belong to.
+ * An Appender to divert logs from individual threads to the OperationLog they belong to.
*/
-public class LogDivertAppender extends WriterAppender {
- private static final Logger LOG = Logger.getLogger(LogDivertAppender.class.getName());
+public class LogDivertAppender
+ extends AbstractOutputStreamAppender<LogDivertAppender.StringOutputStreamManager> {
+ private static final Logger LOG = LogManager.getLogger();
private final OperationManager operationManager;
private boolean isVerbose;
private Layout verboseLayout;
+ private StringOutputStreamManager manager;
+
+ /**
+ * Instantiate a LogDivertAppender and set the output destination to a
+ * new {@link OutputStreamWriter} initialized with the manager's
+ * {@link OutputStream}.
+ * @param name The name of the Appender.
+ * @param layout The layout to format the message.
+ * @param filter The filter, if any, to use.
+ * @param ignoreExceptions If true, exceptions from appending are logged and suppressed.
+ * @param immediateFlush If true, the stream is flushed at the end of each append.
+ * @param manager The OutputStreamManager.
+ * @param isVerbose Whether to use the verbose layout.
+ * @param operationManager The OperationManager that owns the per-thread operation logs.
+ */
+ protected LogDivertAppender(String name,
+ Layout<? extends Serializable> layout, Filter filter, boolean ignoreExceptions,
+ boolean immediateFlush,
+ StringOutputStreamManager manager, boolean isVerbose, OperationManager operationManager) {
+ super(name, layout, filter, ignoreExceptions, immediateFlush, manager);
+ this.operationManager = operationManager;
+ this.verboseLayout = isVerbose ? layout : CLIServiceUtils.verboseLayout;
+ this.manager = manager;
+ }
/**
* A log filter that filters messages coming from the logger with the given names.
@@ -52,31 +80,31 @@
* they don't generate more logs for themselves when they process logs.
* White list filter is used for less verbose log collection
*/
- private static class NameFilter extends Filter {
+ private static class NameFilter extends AbstractFilter {
private Pattern namePattern;
- private LoggingLevel loggingMode;
+ private OperationLog.LoggingLevel loggingMode;
private OperationManager operationManager;
/* Patterns that are excluded in verbose logging level.
* Filter out messages coming from log processing classes, or we'll run an infinite loop.
*/
private static final Pattern verboseExcludeNamePattern = Pattern.compile(Joiner.on("|").
- join(new String[] {LOG.getName(), OperationLog.class.getName(),
- OperationManager.class.getName()}));
+ join(new String[]{LOG.getName(), OperationLog.class.getName(),
+ OperationManager.class.getName()}));
/* Patterns that are included in execution logging level.
* In execution mode, show only select logger messages.
*/
private static final Pattern executionIncludeNamePattern = Pattern.compile(Joiner.on("|").
- join(new String[] {"org.apache.hadoop.mapreduce.JobSubmitter",
- "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
- "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"}));
+ join(new String[]{"org.apache.hadoop.mapreduce.JobSubmitter",
+ "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName(),
+ "org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor"}));
/* Patterns that are included in performance logging level.
* In performance mode, show execution and performance logger messages.
*/
private static final Pattern performanceIncludeNamePattern = Pattern.compile(
- executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName());
+ executionIncludeNamePattern.pattern() + "|" + PerfLogger.class.getName());
private void setCurrentNamePattern(OperationLog.LoggingLevel mode) {
if (mode == OperationLog.LoggingLevel.VERBOSE) {
@@ -88,26 +116,25 @@ private void setCurrentNamePattern(OperationLog.LoggingLevel mode) {
}
}
- public NameFilter(
- OperationLog.LoggingLevel loggingMode, OperationManager op) {
+ public NameFilter(OperationLog.LoggingLevel loggingMode, OperationManager op) {
this.operationManager = op;
this.loggingMode = loggingMode;
setCurrentNamePattern(loggingMode);
}
@Override
- public int decide(LoggingEvent ev) {
+ public Result filter(LogEvent event) {
OperationLog log = operationManager.getOperationLogByThread();
boolean excludeMatches = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
if (log == null) {
- return Filter.DENY;
+ return Result.DENY;
}
OperationLog.LoggingLevel currentLoggingMode = log.getOpLoggingLevel();
// If logging is disabled, deny everything.
if (currentLoggingMode == OperationLog.LoggingLevel.NONE) {
- return Filter.DENY;
+ return Result.DENY;
}
// Look at the current session's setting
// and set the pattern and excludeMatches accordingly.
@@ -116,88 +143,64 @@ public int decide(LoggingEvent ev) {
setCurrentNamePattern(loggingMode);
}
- boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches();
+ boolean isMatch = namePattern.matcher(event.getLoggerName()).matches();
if (excludeMatches == isMatch) {
// Deny if this is black-list filter (excludeMatches = true) and it
// matched
// or if this is whitelist filter and it didn't match
- return Filter.DENY;
+ return Result.DENY;
}
- return Filter.NEUTRAL;
+ return Result.NEUTRAL;
}
}
- /** This is where the log message will go to */
- private final CharArrayWriter writer = new CharArrayWriter();
-
- private void setLayout (boolean isVerbose, Layout lo) {
- if (isVerbose) {
- if (lo == null) {
- lo = CLIServiceUtils.verboseLayout;
- LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
- }
- } else {
- lo = CLIServiceUtils.nonVerboseLayout;
- }
- setLayout(lo);
- }
-
- private void initLayout(boolean isVerbose) {
+ private static Layout initLayout(boolean isVerbose) {
// There should be a ConsoleAppender. Copy its Layout.
- Logger root = Logger.getRootLogger();
+ Logger root = LogManager.getRootLogger();
Layout layout = null;
- Enumeration<?> appenders = root.getAllAppenders();
- while (appenders.hasMoreElements()) {
- Appender ap = (Appender) appenders.nextElement();
+ for (Appender ap : ((org.apache.logging.log4j.core.Logger) root).getAppenders().values()) {
if (ap.getClass().equals(ConsoleAppender.class)) {
layout = ap.getLayout();
break;
}
}
- setLayout(isVerbose, layout);
- }
- public LogDivertAppender(OperationManager operationManager,
- OperationLog.LoggingLevel loggingMode) {
- isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
- initLayout(isVerbose);
- setWriter(writer);
- setName("LogDivertAppender");
- this.operationManager = operationManager;
- this.verboseLayout = isVerbose ? layout : CLIServiceUtils.verboseLayout;
- addFilter(new NameFilter(loggingMode, operationManager));
- }
+ if (isVerbose) {
+ if (layout == null) {
+ layout = CLIServiceUtils.verboseLayout;
+ LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
+ }
+ } else {
+ layout = CLIServiceUtils.nonVerboseLayout;
+ }
- @Override
- public void doAppend(LoggingEvent event) {
- OperationLog log = operationManager.getOperationLogByThread();
+ return layout;
+ }
- // Set current layout depending on the verbose/non-verbose mode.
- if (log != null) {
- boolean isCurrModeVerbose = (log.getOpLoggingLevel() == OperationLog.LoggingLevel.VERBOSE);
+ public static LogDivertAppender createInstance(OperationManager operationManager,
+ OperationLog.LoggingLevel loggingMode) {
+ boolean isVerbose = (loggingMode == OperationLog.LoggingLevel.VERBOSE);
+ Layout layout = initLayout(isVerbose);
+ Filter filter = new NameFilter(loggingMode, operationManager);
+ return new LogDivertAppender("LogDivertAppender", layout, filter, false, true,
+ new StringOutputStreamManager(new ByteArrayOutputStream(), "StringStream", layout),
+ isVerbose, operationManager);
+ }
- // If there is a logging level change from verbose->non-verbose or vice-versa since
- // the last subAppend call, change the layout to preserve consistency.
- if (isCurrModeVerbose != isVerbose) {
- isVerbose = isCurrModeVerbose;
- setLayout(isVerbose, verboseLayout);
- }
- }
- super.doAppend(event);
+ public String getOutput() {
+ return new String(manager.getStream().toByteArray());
}
- /**
- * Overrides WriterAppender.subAppend(), which does the real logging. No need
- * to worry about concurrency since log4j calls this synchronously.
- */
@Override
- protected void subAppend(LoggingEvent event) {
- super.subAppend(event);
- // That should've gone into our writer. Notify the LogContext.
- String logOutput = writer.toString();
- writer.reset();
+ public void append(LogEvent event) {
+ super.append(event);
+
+ // FIXME: check if log verbosity has changed
+
+ String logOutput = getOutput();
+ manager.reset();
OperationLog log = operationManager.getOperationLogByThread();
if (log == null) {
@@ -206,4 +209,22 @@ protected void subAppend(LoggingEvent event) {
}
log.writeOperationLog(logOutput);
}
+
+ protected static class StringOutputStreamManager extends OutputStreamManager {
+ ByteArrayOutputStream stream;
+
+ protected StringOutputStreamManager(ByteArrayOutputStream os, String streamName,
+ Layout<?> layout) {
+ super(os, streamName, layout);
+ stream = os;
+ }
+
+ public ByteArrayOutputStream getStream() {
+ return stream;
+ }
+
+ public void reset() {
+ stream.reset();
+ }
+ }
}
diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
index 9b0a519..df59857 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -41,8 +41,12 @@
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
-import org.apache.log4j.Appender;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
/**
* OperationManager.
@@ -50,7 +54,8 @@
*/
public class OperationManager extends AbstractService {
private final Log LOG = LogFactory.getLog(OperationManager.class.getName());
-
+ private LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ private Configuration configuration = context.getConfiguration();
 private final Map<OperationHandle, Operation> handleToOperation =
     new HashMap<OperationHandle, Operation>();
@@ -83,8 +88,11 @@ public synchronized void stop() {
private void initOperationLogCapture(String loggingMode) {
// Register another Appender (with the same layout) that talks to us.
- Appender ap = new LogDivertAppender(this, OperationLog.getLoggingLevel(loggingMode));
- Logger.getRootLogger().addAppender(ap);
+ Appender ap = LogDivertAppender.createInstance(this, OperationLog.getLoggingLevel(loggingMode));
+ LoggerConfig loggerConfig = configuration.getLoggerConfig(LogManager.getLogger().getName());
+ loggerConfig.addAppender(ap, Level.DEBUG, null);
+ context.updateLoggers();
+ ap.start();
}
public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,
diff --git a/shims/common/pom.xml b/shims/common/pom.xml
index 9e9a3b7..dfdec2b 100644
--- a/shims/common/pom.xml
+++ b/shims/common/pom.xml
@@ -41,14 +41,19 @@
${commons-logging.version}
- log4j
- log4j
- ${log4j.version}
+ org.apache.logging.log4j
+ log4j-1.2-api
+ ${log4j2.version}
- log4j
- apache-log4j-extras
- ${log4j-extras.version}
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ ${log4j2.version}
+
+
+ org.apache.logging.log4j
+ log4j-jcl
+ ${log4j2.version}com.google.guava
diff --git a/spark-client/src/test/resources/log4j.properties b/spark-client/src/test/resources/log4j.properties
deleted file mode 100644
index 93a60cc..0000000
--- a/spark-client/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootCategory=DEBUG, console
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.target=System.err
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
diff --git a/spark-client/src/test/resources/log4j2.xml b/spark-client/src/test/resources/log4j2.xml
new file mode 100644
index 0000000..cd9cbaf
--- /dev/null
+++ b/spark-client/src/test/resources/log4j2.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="SparkClientLog4j2">
+
+  <Properties>
+    <Property name="spark.log.level">DEBUG</Property>
+    <Property name="spark.root.logger">console</Property>
+  </Properties>
+
+  <Appenders>
+    <Console name="console" target="SYSTEM_ERR">
+      <PatternLayout pattern="%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n"/>
+    </Console>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:spark.log.level}">
+      <AppenderRef ref="${sys:spark.root.logger}"/>
+    </Root>
+  </Loggers>
+
+</Configuration>
diff --git a/testutils/ptest2/src/main/resources/log4j.properties b/testutils/ptest2/src/main/resources/log4j.properties
deleted file mode 100644
index edb9696..0000000
--- a/testutils/ptest2/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-hive.ptest.logdir=target
-
-log4j.rootLogger=DEBUG,FILE
-log4j.threshhold=ALL
-
-log4j.appender.FILE=org.apache.log4j.RollingFileAppender
-log4j.appender.FILE.File=${hive.ptest.logdir}/ptest.log
-log4j.appender.FILE.MaxFileSize=50MB
-log4j.appender.FILE.MaxBackupIndex=1
-log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
-log4j.appender.FILE.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
-
-log4j.logger.org.apache.http=INFO
-log4j.logger.org.springframework=INFO
-log4j.logger.org.jclouds=INFO
-log4j.logger.jclouds=INFO
-log4j.logger.org.apache.hive=DEBUG
-log4j.logger.org.apache.http=TRACE
-
-# Silence useless ZK logs
-log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN
-log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN
diff --git a/testutils/ptest2/src/main/resources/log4j2.xml b/testutils/ptest2/src/main/resources/log4j2.xml
new file mode 100644
index 0000000..800c90f
--- /dev/null
+++ b/testutils/ptest2/src/main/resources/log4j2.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Configuration status="info" strict="true" name="PTest2Log4j2">
+
+  <Properties>
+    <Property name="ptest.log.level">DEBUG</Property>
+    <Property name="ptest.root.logger">FILE</Property>
+    <Property name="hive.ptest.logdir">target</Property>
+    <Property name="ptest.log.file">ptest.log</Property>
+  </Properties>
+
+  <Appenders>
+    <RollingFile name="FILE" fileName="${sys:hive.ptest.logdir}/${sys:ptest.log.file}"
+                 filePattern="${sys:hive.ptest.logdir}/${sys:ptest.log.file}.%i">
+      <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n"/>
+      <Policies>
+        <SizeBasedTriggeringPolicy size="50 MB"/>
+      </Policies>
+      <DefaultRolloverStrategy max="1"/>
+    </RollingFile>
+  </Appenders>
+
+  <Loggers>
+    <Root level="${sys:ptest.log.level}">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Root>
+
+    <Logger name="org.apache.http" level="TRACE">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+    <Logger name="org.springframework" level="INFO">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+    <Logger name="org.jclouds" level="INFO">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+    <Logger name="jclouds" level="INFO">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.hive" level="DEBUG">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+
+    <!-- Silence useless ZK logs -->
+    <Logger name="org.apache.zookeeper.server.NIOServerCnxn" level="WARN">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+    <Logger name="org.apache.zookeeper.ClientCnxnSocketNIO" level="WARN">
+      <AppenderRef ref="${sys:ptest.root.logger}"/>
+    </Logger>
+  </Loggers>
+
+</Configuration>