diff --git a/cli/pom.xml b/cli/pom.xml
index 76f6d11..6f2e664 100644
--- a/cli/pom.xml
+++ b/cli/pom.xml
@@ -82,6 +82,12 @@
test
+      <groupId>com.lmax</groupId>
+      <artifactId>disruptor</artifactId>
+      <version>${disruptor.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
jline
jline
${jline.version}
diff --git a/common/pom.xml b/common/pom.xml
index 1b9b4bc..d9934fc 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -164,6 +164,12 @@
test
+      <groupId>com.lmax</groupId>
+      <artifactId>disruptor</artifactId>
+      <version>${disruptor.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
org.json
json
${json.version}
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index adcf805..599e798 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -21,12 +21,18 @@
import java.io.File;
import java.net.URL;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.logging.log4j.core.impl.Log4jContextFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.annotations.VisibleForTesting;
+
/**
* Utilities common to logging operations.
*/
@@ -66,8 +72,14 @@ public static String initHiveExecLog4j()
}
private static String initHiveLog4jCommon(ConfVars confVarName)
- throws LogInitializationException {
+ throws LogInitializationException {
HiveConf conf = new HiveConf();
+ return initHiveLog4jCommon(conf, confVarName);
+ }
+
+ @VisibleForTesting
+ public static String initHiveLog4jCommon(HiveConf conf, ConfVars confVarName)
+ throws LogInitializationException {
if (HiveConf.getVar(conf, confVarName).equals("")) {
// if log4j configuration file not set, or could not found, use default setting
return initHiveLog4jDefault(conf, "", confVarName);
@@ -91,13 +103,28 @@ private static String initHiveLog4jCommon(ConfVars confVarName)
}
System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);
}
+ final boolean async = checkAndSetAsyncLogging(conf);
Configurator.initialize(null, log4jFileName);
logConfigLocation(conf);
- return ("Logging initialized using configuration in " + log4jConfigFile);
+ return "Logging initialized using configuration in " + log4jConfigFile + " Async: " + async;
}
}
}
+ public static boolean checkAndSetAsyncLogging(final Configuration conf) {
+ final boolean asyncLogging = HiveConf.getBoolVar(conf, ConfVars.HIVE_ASYNC_LOG_ENABLED);
+ if (asyncLogging) {
+ System.setProperty("Log4jContextSelector",
+ "org.apache.logging.log4j.core.async.AsyncLoggerContextSelector");
+ // default is ClassLoaderContextSelector which is created during automatic logging
+ // initialization in a static initialization block.
+ // Changing ContextSelector at runtime requires creating new context factory which will
+ // internally create new context selector based on system property.
+ LogManager.setFactory(new Log4jContextFactory());
+ }
+ return asyncLogging;
+ }
+
private static String initHiveLog4jDefault(
HiveConf conf, String logMessage, ConfVars confVarName)
throws LogInitializationException {
@@ -118,9 +145,11 @@ private static String initHiveLog4jDefault(
break;
}
if (hive_l4j != null) {
+ final boolean async = checkAndSetAsyncLogging(conf);
Configurator.initialize(null, hive_l4j.toString());
logConfigLocation(conf);
- return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j);
+ return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j +
+ " Async: " + async);
} else {
throw new LogInitializationException(
logMessage + "Unable to initialize logging using "
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 95c5c0e..ecc2495 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1937,6 +1937,12 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal
"If the property is not set, then logging will be initialized using hive-exec-log4j2.properties found on the classpath.\n" +
"If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" +
"which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."),
+ HIVE_ASYNC_LOG_ENABLED("hive.async.log.enabled", true,
+ "Whether to enable Log4j2's asynchronous logging. Asynchronous logging can give\n" +
+ " significant performance improvement as logging will be handled in separate thread\n" +
+ " that uses LMAX disruptor queue for buffering log messages.\n" +
+ " Refer https://logging.apache.org/log4j/2.x/manual/async.html for benefits and\n" +
+ " drawbacks."),
HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false,
"Whether to log explain output for every query.\n" +
diff --git a/common/src/main/resources/hive-log4j2.properties b/common/src/main/resources/hive-log4j2.properties
index 12cd9ac..9d4d9b4 100644
--- a/common/src/main/resources/hive-log4j2.properties
+++ b/common/src/main/resources/hive-log4j2.properties
@@ -35,8 +35,10 @@ appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
# daily rolling file appender
-appender.DRFA.type = RollingFile
+appender.DRFA.type = RollingRandomAccessFile
appender.DRFA.name = DRFA
+appender.DRFA.append = true
+appender.DRFA.immediateFlush = false
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
# Use %pid in the filePattern to append @ to the filename if you want separate log files for different CLI session
appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
@@ -69,5 +71,6 @@ logger.JPOX.level = ERROR
# root logger
rootLogger.level = ${sys:hive.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java
new file mode 100644
index 0000000..e2631cf
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.conf;
+
+import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.async.AsyncLoggerContextSelector;
+import org.apache.logging.log4j.core.impl.Log4jContextFactory;
+import org.apache.logging.log4j.core.selector.ClassLoaderContextSelector;
+import org.apache.logging.log4j.core.selector.ContextSelector;
+import org.junit.Test;
+
+import junit.framework.TestCase;
+
+public class TestHiveAsyncLogging extends TestCase {
+
+ // this test requires disruptor jar in classpath
+ @Test
+ public void testAsyncLoggingInitialization() throws Exception {
+ HiveConf conf = new HiveConf();
+ conf.setBoolVar(ConfVars.HIVE_ASYNC_LOG_ENABLED, false);
+ LogUtils.initHiveLog4jCommon(conf, ConfVars.HIVE_LOG4J_FILE);
+ Log4jContextFactory log4jContextFactory = (Log4jContextFactory) LogManager.getFactory();
+ ContextSelector contextSelector = log4jContextFactory.getSelector();
+ assertTrue(contextSelector instanceof ClassLoaderContextSelector);
+
+ conf.setBoolVar(ConfVars.HIVE_ASYNC_LOG_ENABLED, true);
+ LogUtils.initHiveLog4jCommon(conf, ConfVars.HIVE_LOG4J_FILE);
+ log4jContextFactory = (Log4jContextFactory) LogManager.getFactory();
+ contextSelector = log4jContextFactory.getSelector();
+ assertTrue(contextSelector instanceof AsyncLoggerContextSelector);
+ }
+}
diff --git a/data/conf/hive-log4j2.properties b/data/conf/hive-log4j2.properties
index 6bace1f..edaf546 100644
--- a/data/conf/hive-log4j2.properties
+++ b/data/conf/hive-log4j2.properties
@@ -35,8 +35,10 @@ appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
# daily rolling file appender
-appender.DRFA.type = RollingFile
+appender.DRFA.type = RollingRandomAccessFile
appender.DRFA.name = DRFA
+appender.DRFA.append = true
+appender.DRFA.immediateFlush = false
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
appender.DRFA.layout.type = PatternLayout
@@ -119,5 +121,6 @@ logger.ObjectStore.level = INFO
# root logger
rootLogger.level = ${sys:hive.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
diff --git a/hcatalog/core/pom.xml b/hcatalog/core/pom.xml
index 1e970bf..c9a6c01 100644
--- a/hcatalog/core/pom.xml
+++ b/hcatalog/core/pom.xml
@@ -131,6 +131,12 @@
test
+      <groupId>com.lmax</groupId>
+      <artifactId>disruptor</artifactId>
+      <version>${disruptor.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
org.apache.hadoop
hadoop-common
${hadoop.version}
diff --git a/llap-server/bin/llapDaemon.sh b/llap-server/bin/llapDaemon.sh
index 6f57998..566bbc8 100755
--- a/llap-server/bin/llapDaemon.sh
+++ b/llap-server/bin/llapDaemon.sh
@@ -113,7 +113,7 @@ case $startStop in
#rotate_log $logOut
echo starting llapdaemon, logging to $logLog and $logOut
export LLAP_DAEMON_LOGFILE=${LLAP_DAEMON_LOG_BASE}.log
- nohup nice -n $LLAP_DAEMON_NICENESS "$LLAP_DAEMON_BIN_HOME"/runLlapDaemon.sh run > "$logOut" 2>&1 < /dev/null &
+ nohup nice -n $LLAP_DAEMON_NICENESS "$LLAP_DAEMON_BIN_HOME"/runLlapDaemon.sh run >> "$logOut" 2>&1 < /dev/null &
echo $! > $pid
;;
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
index 22d7eec..9b10947 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
@@ -28,6 +28,7 @@
import javax.management.ObjectName;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.llap.configuration.LlapDaemonConfiguration;
@@ -102,7 +103,7 @@ public LlapDaemon(Configuration daemonConf, int numExecutors, long executorMemor
int mngPort, int shufflePort) {
super("LlapDaemon");
- initializeLogging();
+ initializeLogging(daemonConf);
printAsciiArt();
@@ -220,13 +221,15 @@ public LlapDaemon(Configuration daemonConf, int numExecutors, long executorMemor
addIfService(amReporter);
}
- private void initializeLogging() {
+ private void initializeLogging(final Configuration conf) {
long start = System.currentTimeMillis();
URL llap_l4j2 = LlapDaemon.class.getClassLoader().getResource(LOG4j2_PROPERTIES_FILE);
if (llap_l4j2 != null) {
+ final boolean async = LogUtils.checkAndSetAsyncLogging(conf);
Configurator.initialize("LlapDaemonLog4j2", llap_l4j2.toString());
long end = System.currentTimeMillis();
- LOG.warn("LLAP daemon logging initialized from {} in {} ms", llap_l4j2, (end - start));
+ LOG.warn("LLAP daemon logging initialized from {} in {} ms. Async: {}",
+ llap_l4j2, (end - start), async);
} else {
throw new RuntimeException("Log initialization failed." +
" Unable to locate " + LOG4j2_PROPERTIES_FILE + " file in classpath");
diff --git a/llap-server/src/main/resources/llap-cli-log4j2.properties b/llap-server/src/main/resources/llap-cli-log4j2.properties
index 7542193..258766d 100644
--- a/llap-server/src/main/resources/llap-cli-log4j2.properties
+++ b/llap-server/src/main/resources/llap-cli-log4j2.properties
@@ -36,8 +36,10 @@ appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %p %c{2}: %m%n
# daily rolling file appender
-appender.DRFA.type = RollingFile
+appender.DRFA.type = RollingRandomAccessFile
appender.DRFA.name = DRFA
+appender.DRFA.append = true
+appender.DRFA.immediateFlush = false
appender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
# Use %pid in the filePattern to append @ to the filename if you want separate log files for different CLI session
appender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}
@@ -73,5 +75,6 @@ logger.HadoopConf.level = ERROR
# root logger
rootLogger.level = ${sys:hive.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
diff --git a/llap-server/src/main/resources/llap-daemon-log4j2.properties b/llap-server/src/main/resources/llap-daemon-log4j2.properties
index 268eb59..4e3ecb4 100644
--- a/llap-server/src/main/resources/llap-daemon-log4j2.properties
+++ b/llap-server/src/main/resources/llap-daemon-log4j2.properties
@@ -38,8 +38,10 @@ appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t%x] %p %c{2} : %m%n
# rolling file appender
-appender.RFA.type = RollingFile
+appender.RFA.type = RollingRandomAccessFile
appender.RFA.name = RFA
+appender.RFA.append = true
+appender.RFA.immediateFlush = false
appender.RFA.fileName = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.log.file}
appender.RFA.filePattern = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.log.file}_%i
appender.RFA.layout.type = PatternLayout
@@ -51,8 +53,10 @@ appender.RFA.strategy.type = DefaultRolloverStrategy
appender.RFA.strategy.max = ${sys:llap.daemon.log.maxbackupindex}
# history file appender
-appender.HISTORYAPPENDER.type = RollingFile
+appender.HISTORYAPPENDER.type = RollingRandomAccessFile
appender.HISTORYAPPENDER.name = HISTORYAPPENDER
+appender.HISTORYAPPENDER.append = true
+appender.HISTORYAPPENDER.immediateFlush = false
appender.HISTORYAPPENDER.fileName = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}
appender.HISTORYAPPENDER.filePattern = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}_%i
appender.HISTORYAPPENDER.layout.type = PatternLayout
@@ -101,5 +105,6 @@ logger.HistoryLogger.appenderRef.HistoryAppender.ref = HISTORYAPPENDER
# root logger
rootLogger.level = ${sys:llap.daemon.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:llap.daemon.root.logger}
diff --git a/llap-server/src/test/resources/llap-daemon-log4j2.properties b/llap-server/src/test/resources/llap-daemon-log4j2.properties
index 7b5f4ed..89fda60 100644
--- a/llap-server/src/test/resources/llap-daemon-log4j2.properties
+++ b/llap-server/src/test/resources/llap-daemon-log4j2.properties
@@ -38,8 +38,10 @@ appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t%x] %p %c{2} : %m%n
# rolling file appender
-appender.RFA.type = RollingFile
+appender.RFA.type = RollingRandomAccessFile
appender.RFA.name = RFA
+appender.RFA.append = true
+appender.RFA.immediateFlush = false
appender.RFA.fileName = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.log.file}
appender.RFA.filePattern = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.log.file}_%i
appender.RFA.layout.type = PatternLayout
@@ -51,8 +53,10 @@ appender.RFA.strategy.type = DefaultRolloverStrategy
appender.RFA.strategy.max = ${sys:llap.daemon.log.maxbackupindex}
# history file appender
-appender.HISTORYAPPENDER.type = RollingFile
+appender.HISTORYAPPENDER.type = RollingRandomAccessFile
appender.HISTORYAPPENDER.name = HISTORYAPPENDER
+appender.HISTORYAPPENDER.append = true
+appender.HISTORYAPPENDER.immediateFlush = false
appender.HISTORYAPPENDER.fileName = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}
appender.HISTORYAPPENDER.filePattern = ${sys:llap.daemon.log.dir}/${sys:llap.daemon.historylog.file}_%i
appender.HISTORYAPPENDER.layout.type = PatternLayout
@@ -89,5 +93,6 @@ logger.HistoryLogger.appenderRef.HistoryAppender.ref = HISTORYAPPENDER
# root logger
rootLogger.level = ${sys:llap.daemon.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:llap.daemon.root.logger}
diff --git a/metastore/pom.xml b/metastore/pom.xml
index 18c1f9c..ad9d498 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -220,6 +220,12 @@
${mockito-all.version}
test
+    <dependency>
+      <groupId>com.lmax</groupId>
+      <artifactId>disruptor</artifactId>
+      <version>${disruptor.version}</version>
+      <scope>test</scope>
+    </dependency>
diff --git a/pom.xml b/pom.xml
index 2337e89..1120b99 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,6 +133,8 @@
2.6.0
${basedir}/${hive.path.to.root}/testutils/hadoop
1.1.1
+
+    <disruptor.version>3.3.0</disruptor.version>
4.4
4.4
diff --git a/ql/pom.xml b/ql/pom.xml
index ebb9599..3240150 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -320,6 +320,12 @@
test
+      <groupId>com.lmax</groupId>
+      <artifactId>disruptor</artifactId>
+      <version>${disruptor.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
org.codehaus.groovy
groovy-all
${groovy.version}
diff --git a/ql/src/main/resources/hive-exec-log4j2.properties b/ql/src/main/resources/hive-exec-log4j2.properties
index 4fba04c..bdcbf7f 100644
--- a/ql/src/main/resources/hive-exec-log4j2.properties
+++ b/ql/src/main/resources/hive-exec-log4j2.properties
@@ -36,8 +36,10 @@ appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n
# simple file appender
-appender.FA.type = File
+appender.FA.type = RandomAccessFile
appender.FA.name = FA
+appender.FA.append = true
+appender.FA.immediateFlush = false
appender.FA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}
appender.FA.layout.type = PatternLayout
appender.FA.layout.pattern = %d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n
@@ -62,5 +64,6 @@ logger.JPOX.level = ERROR
# root logger
rootLogger.level = ${sys:hive.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:hive.root.logger}
diff --git a/ql/src/main/resources/tez-container-log4j2.properties b/ql/src/main/resources/tez-container-log4j2.properties
index 5d2b138..f13259b 100644
--- a/ql/src/main/resources/tez-container-log4j2.properties
+++ b/ql/src/main/resources/tez-container-log4j2.properties
@@ -28,8 +28,10 @@ property.tez.container.log.file = syslog
appenders = CLA
# daily rolling file appender
-appender.CLA.type = RollingFile
+appender.CLA.type = RollingRandomAccessFile
appender.CLA.name = CLA
+appender.CLA.append = true
+appender.CLA.immediateFlush = false
appender.CLA.fileName = ${sys:tez.container.log.dir}/${sys:tez.container.log.file}
appender.CLA.filePattern = ${sys:tez.container.log.dir}/${sys:tez.container.log.file}.%d{yyyy-MM-dd}
appender.CLA.layout.type = PatternLayout
@@ -43,5 +45,6 @@ appender.CLA.strategy.max = 30
# root logger
rootLogger.level = ${sys:tez.container.log.level}
+rootLogger.includeLocation = false
rootLogger.appenderRefs = root
rootLogger.appenderRef.root.ref = ${sys:tez.container.root.logger}