diff --git common/src/java/org/apache/hadoop/hive/common/LogUtils.java common/src/java/org/apache/hadoop/hive/common/LogUtils.java index ff44265..23b9921 100644 --- common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -19,17 +19,26 @@ package org.apache.hadoop.hive.common; import java.net.URL; +import java.io.File; +import java.io.IOException; +import java.io.FileNotFoundException; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.log4j.LogManager; import org.apache.log4j.PropertyConfigurator; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; /** * Utilities common to logging operations. */ public class LogUtils { - public static final String HIVE_L4J = "hive-log4j.properties"; - public static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties"; + private static final String HIVE_L4J = "hive-log4j.properties"; + private static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties"; + private static final Log l4j = LogFactory.getLog(LogUtils.class); @SuppressWarnings("serial") public static class LogInitializationException extends Exception { @@ -39,21 +48,101 @@ public class LogUtils { } /** - * Initialize log4j based on hive-log4j.properties. + * Initialize log4j. * * @return an message suitable for display to the user * @throws LogInitializationException if log4j fails to initialize correctly */ - public static String initHiveLog4j() throws LogInitializationException { - // allow hive log4j to override any normal initialized one - URL hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J); + public static String initHiveLog4j() + throws LogInitializationException { + return initHiveLog4jCommon(HiveConf.ConfVars.HIVE_LOG4J_FILE); + } + + /** + * Initialize log4j for execution mode. 
+ * + * @return a message suitable for display to the user + * @throws LogInitializationException if log4j-exec fails to initialize correctly + */ + public static String initHiveExecLog4j() + throws LogInitializationException { + return initHiveLog4jCommon(HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE); + } + + private static String initHiveLog4jCommon(ConfVars confVarName) + throws LogInitializationException { + HiveConf conf = new HiveConf(); + if (HiveConf.getVar(conf, confVarName).equals("")) { + // if log4j configuration file not set, or could not be found, use default setting + return initHiveLog4jDefault(conf, "", confVarName); + } else { + // if log4j configuration file found successfully, use HiveConf property value + String log4jFileName = HiveConf.getVar(conf, confVarName); + File log4jConfigFile = new File(log4jFileName); + boolean fileExists = log4jConfigFile.exists(); + if (!fileExists) { + // if property specified file not found in local file system + // use default setting + return initHiveLog4jDefault( + conf, "Not able to find conf file: " + log4jConfigFile, confVarName); + } else { + // property specified file found in local file system + // use the specified file + if (confVarName == HiveConf.ConfVars.HIVE_EXEC_LOG4J_FILE) { + System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID)); + } + LogManager.resetConfiguration(); + PropertyConfigurator.configure(log4jFileName); + logConfigLocation(conf); + return ("Logging initialized using configuration in " + log4jConfigFile); + } + } + } + + private static String initHiveLog4jDefault( + HiveConf conf, String logMessage, ConfVars confVarName) + throws LogInitializationException { + URL hive_l4j = null; + switch (confVarName) { + case HIVE_EXEC_LOG4J_FILE: + hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_EXEC_L4J); + if (hive_l4j == null) { + hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J); + } + 
System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID)); + break; + case HIVE_LOG4J_FILE: + hive_l4j = LogUtils.class.getClassLoader().getResource(HIVE_L4J); + break; + default: + break; + } if (hive_l4j != null) { LogManager.resetConfiguration(); PropertyConfigurator.configure(hive_l4j); - return "Logging initialized using configuration in " + hive_l4j; + logConfigLocation(conf); + return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j); + } else { + throw new LogInitializationException( + logMessage + "Unable to initialize logging using " + + LogUtils.HIVE_L4J + ", not found on CLASSPATH!"); + } + } + + private static void logConfigLocation(HiveConf conf) throws LogInitializationException { + // Log a warning if hive-default.xml is found on the classpath + if (conf.getHiveDefaultLocation() != null) { + l4j.warn("DEPRECATED: Ignoring hive-default.xml found on the CLASSPATH at " + + conf.getHiveDefaultLocation().getPath()); + } + // Look for hive-site.xml on the CLASSPATH and log its location if found. 
+ if (conf.getHiveSiteLocation() == null) { + l4j.warn("hive-site.xml not found on CLASSPATH"); } else { - throw new LogInitializationException("Unable to initialize logging using " - + LogUtils.HIVE_L4J + ", not found on CLASSPATH!"); + l4j.debug("Using hive-site.xml found on CLASSPATH at " + + conf.getHiveSiteLocation().getPath()); } } } diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 9fa9525..d40c866 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -36,6 +36,8 @@ import javax.security.auth.login.LoginException; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.LogUtils; +import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.mapred.JobConf; @@ -51,6 +53,7 @@ public class HiveConf extends Configuration { protected Properties origProp; protected String auxJars; private static final Log l4j = LogFactory.getLog(HiveConf.class); + private static URL hiveDefaultURL = null; private static URL hiveSiteURL = null; private static URL confVarURL = null; @@ -62,20 +65,10 @@ public class HiveConf extends Configuration { classLoader = HiveConf.class.getClassLoader(); } - // Log a warning if hive-default.xml is found on the classpath - URL hiveDefaultURL = classLoader.getResource("hive-default.xml"); - if (hiveDefaultURL != null) { - l4j.warn("DEPRECATED: Ignoring hive-default.xml found on the CLASSPATH at " + - hiveDefaultURL.getPath()); - } + hiveDefaultURL = classLoader.getResource("hive-default.xml"); // Look for hive-site.xml on the CLASSPATH and log its location if found. 
hiveSiteURL = classLoader.getResource("hive-site.xml"); - if (hiveSiteURL == null) { - l4j.warn("hive-site.xml not found on CLASSPATH"); - } else { - l4j.debug("Using hive-site.xml found on CLASSPATH at " + hiveSiteURL.getPath()); - } for (ConfVars confVar : ConfVars.values()) { vars.put(confVar.varname, confVar); } @@ -629,6 +622,10 @@ public class HiveConf extends Configuration { HIVE_CONCATENATE_CHECK_INDEX ("hive.exec.concatenate.check.index", true), HIVE_IO_EXCEPTION_HANDLERS("hive.io.exception.handlers", ""), + // logging configuration + HIVE_LOG4J_FILE("hive.log4j.file", ""), + HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", ""), + //prefix used to auto generated column aliases HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL("hive.autogen.columnalias.prefix.label", "_c"), HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME( @@ -1067,10 +1064,6 @@ public class HiveConf extends Configuration { return (ret); } - public String getHiveSitePath() { - return hiveSiteURL.getPath(); - } - public String getJar() { return hiveJar; } @@ -1090,6 +1083,14 @@ public class HiveConf extends Configuration { setVar(this, ConfVars.HIVEAUXJARS, auxJars); } + public URL getHiveDefaultLocation() { + return hiveDefaultURL; + } + + public URL getHiveSiteLocation() { + return hiveSiteURL; + } + /** * @return the user name set in hadoop.job.ugi param or the current user from System * @throws IOException diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java index a3088b2..f64b164 100644 --- common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java +++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java @@ -35,7 +35,7 @@ public class TestHiveConf extends TestCase { public void testHiveSitePath() throws Exception { String expectedPath = new Path(System.getProperty("test.build.resources") + "/hive-site.xml").toUri().getPath(); - assertEquals(expectedPath, new HiveConf().getHiveSitePath()); + 
assertEquals(expectedPath, new HiveConf().getHiveSiteLocation().getPath()); } private void checkHadoopConf(String name, String expectedHadoopVal) throws Exception { diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java new file mode 100644 index 0000000..1bc4424 --- /dev/null +++ common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java @@ -0,0 +1,125 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.conf; + +import junit.framework.TestCase; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.common.LogUtils; +import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; + +/** + * TestHiveLogging + * + * Test cases for HiveLogging, which is initialized in HiveConf. + * Loads configuration files located in common/src/test/resources. 
+ */ +public class TestHiveLogging extends TestCase { + private Runtime runTime; + private Process process; + + public TestHiveLogging() { + super(); + runTime = Runtime.getRuntime(); + process = null; + } + + private void configLog(String hiveLog4jTest, String hiveExecLog4jTest) { + System.setProperty(ConfVars.HIVE_LOG4J_FILE.varname, + System.getProperty("test.build.resources") + "/" + hiveLog4jTest); + System.setProperty(ConfVars.HIVE_EXEC_LOG4J_FILE.varname, + System.getProperty("test.build.resources") + "/" + hiveExecLog4jTest); + + String expectedLog4jPath = System.getProperty("test.build.resources") + + "/" + hiveLog4jTest; + String expectedLog4jExecPath = System.getProperty("test.build.resources") + + "/" + hiveExecLog4jTest; + + try { + LogUtils.initHiveLog4j(); + } catch (LogInitializationException e) { + } + + HiveConf conf = new HiveConf(); + assertEquals(expectedLog4jPath, conf.getVar(ConfVars.HIVE_LOG4J_FILE)); + assertEquals(expectedLog4jExecPath, conf.getVar(ConfVars.HIVE_EXEC_LOG4J_FILE)); + } + + private void runCmd(String cmd) { + try { + process = runTime.exec(cmd); + } catch (IOException e) { + e.printStackTrace(); + } + try { + process.waitFor(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + private void getCmdOutput(String logFile) { + boolean logCreated = false; + BufferedReader buf = new BufferedReader( + new InputStreamReader(process.getInputStream())); + String line = ""; + try { + while((line = buf.readLine()) != null) { + if (line.equals(logFile)) + logCreated = true; + } + } catch (IOException e) { + e.printStackTrace(); + } + assertEquals(true, logCreated); + } + + private void RunTest(String cleanCmd, String findCmd, String logFile, + String hiveLog4jProperty, String hiveExecLog4jProperty) throws Exception { + // clean test space + runCmd(cleanCmd); + + // config log4j with customized files + // check whether HiveConf initialize log4j correctly + configLog(hiveLog4jProperty, hiveExecLog4jProperty); + + // 
check whether log file is created on test running + runCmd(findCmd); + getCmdOutput(logFile); + + // clean test space + runCmd(cleanCmd); + } + + public void testHiveLogging() throws Exception { + // customized log4j config log file to be: /tmp/hiveLog4jTest.log + String customLogPath = "/tmp/"; + String customLogName = "hiveLog4jTest.log"; + String customLogFile = customLogPath + customLogName; + String customCleanCmd = "rm -rf " + customLogFile; + String customFindCmd = "find /tmp -name " + customLogName; + RunTest(customCleanCmd, customFindCmd, customLogFile, + "hive-log4j-test.properties", "hive-exec-log4j-test.properties"); + } +} diff --git common/src/test/resources/hive-exec-log4j-test.properties common/src/test/resources/hive-exec-log4j-test.properties new file mode 100644 index 0000000..ece5875 --- /dev/null +++ common/src/test/resources/hive-exec-log4j-test.properties @@ -0,0 +1,55 @@ +# Define some default values that can be overridden by system properties +hive.root.logger=INFO,FA +hive.log.dir=/tmp +hive.log.file=hiveExecLog4jTest.log + +# Define the root logger to the system property "hadoop.root.logger". 
+log4j.rootLogger=${hive.root.logger}, EventCounter + +# Logging Threshold +log4j.threshhold=WARN + +# +# File Appender +# + +log4j.appender.FA=org.apache.log4j.FileAppender +log4j.appender.FA.File=${hive.log.dir}/${hive.log.file} +log4j.appender.FA.layout=org.apache.log4j.PatternLayout + +# Pattern format: Date LogLevel LoggerName LogMessage +#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +# Debugging Pattern format +log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n + + +# +# console +# Add "console" to rootlogger above if you want to use this +# + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n + +#custom logging levels +#log4j.logger.xxx=DEBUG + +# +# Event Counter Appender +# Sends counts of logging messages at different severity levels to Hadoop Metrics. +# +log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter + + +log4j.category.DataNucleus=ERROR,FA +log4j.category.Datastore=ERROR,FA +log4j.category.Datastore.Schema=ERROR,FA +log4j.category.JPOX.Datastore=ERROR,FA +log4j.category.JPOX.Plugin=ERROR,FA +log4j.category.JPOX.MetaData=ERROR,FA +log4j.category.JPOX.Query=ERROR,FA +log4j.category.JPOX.General=ERROR,FA +log4j.category.JPOX.Enhancer=ERROR,FA + diff --git common/src/test/resources/hive-log4j-test.properties common/src/test/resources/hive-log4j-test.properties new file mode 100644 index 0000000..2f08e9a --- /dev/null +++ common/src/test/resources/hive-log4j-test.properties @@ -0,0 +1,67 @@ +# Define some default values that can be overridden by system properties +hive.root.logger=WARN,DRFA +hive.log.dir=/tmp +hive.log.file=hiveLog4jTest.log + +# Define the root logger to the system property "hadoop.root.logger". 
+log4j.rootLogger=${hive.root.logger}, EventCounter + +# Logging Threshold +log4j.threshhold=WARN + +# +# Daily Rolling File Appender +# +# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files +# for different CLI session. +# +# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender + +log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender + +log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} + +# Rollver at midnight +log4j.appender.DRFA.DatePattern=.yyyy-MM-dd + +# 30-day backup +#log4j.appender.DRFA.MaxBackupIndex=30 +log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout + +# Pattern format: Date LogLevel LoggerName LogMessage +#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n +# Debugging Pattern format +log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n + + +# +# console +# Add "console" to rootlogger above if you want to use this +# + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n +log4j.appender.console.encoding=UTF-8 + +#custom logging levels +#log4j.logger.xxx=DEBUG + +# +# Event Counter Appender +# Sends counts of logging messages at different severity levels to Hadoop Metrics. 
+# +log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter + + +log4j.category.DataNucleus=ERROR,DRFA +log4j.category.Datastore=ERROR,DRFA +log4j.category.Datastore.Schema=ERROR,DRFA +log4j.category.JPOX.Datastore=ERROR,DRFA +log4j.category.JPOX.Plugin=ERROR,DRFA +log4j.category.JPOX.MetaData=ERROR,DRFA +log4j.category.JPOX.Query=ERROR,DRFA +log4j.category.JPOX.General=ERROR,DRFA +log4j.category.JPOX.Enhancer=ERROR,DRFA + diff --git conf/hive-default.xml.template conf/hive-default.xml.template index f332f3a..f1fc974 100644 --- conf/hive-default.xml.template +++ conf/hive-default.xml.template @@ -1566,5 +1566,20 @@ + + hive.log4j.file + + Hive log4j configuration file. + If the property is not set, then logging will be initialized using hive-log4j.properties found on the classpath. + If the property is set, the value must be a valid URI (java.net.URI, e.g. "file:///tmp/my-logging.properties"), which you can then extract a URL from and pass to PropertyConfigurator.configure(URL). + + + + hive.exec.log4j.file + + Hive log4j configuration file for execution mode(sub command). + If the property is not set, then logging will be initialized using hive-exec-log4j.properties found on the classpath. + If the property is set, the value must be a valid URI (java.net.URI, e.g. "file:///tmp/my-logging.properties"), which you can then extract a URL from and pass to PropertyConfigurator.configure(URL). 
+ diff --git metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java index 66fd0dc..612f92d 100644 --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -4102,8 +4102,8 @@ public class HiveMetaStore extends ThriftHiveMetastore { // any log specific settings via hiveconf will be ignored Properties hiveconf = cli.addHiveconfToSystemProperties(); - // If the log4j.configuration property hasn't already been explicitly set, use Hive's default - // log4j configuration + // If the log4j.configuration property hasn't already been explicitly set, + // use Hive's default log4j configuration if (System.getProperty("log4j.configuration") == null) { // NOTE: It is critical to do this here so that log4j is reinitialized // before any of the other core hive classes are loaded diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java index 8eef395..805e91c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java @@ -48,6 +48,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.CompressionUtils; import org.apache.hadoop.hive.common.LogUtils; +import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; @@ -563,18 +564,10 @@ public class ExecDriver extends Task implements Serializable, Hadoop */ private static void setupChildLog4j(Configuration conf) { - URL hive_l4j = ExecDriver.class.getClassLoader().getResource(LogUtils.HIVE_EXEC_L4J); - if (hive_l4j == null) { - hive_l4j = ExecDriver.class.getClassLoader().getResource(LogUtils.HIVE_L4J); - 
} - - if (hive_l4j != null) { - // setting queryid so that log4j configuration can use it to generate - // per query log file - System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), HiveConf.getVar(conf, - HiveConf.ConfVars.HIVEQUERYID)); - LogManager.resetConfiguration(); - PropertyConfigurator.configure(hive_l4j); + try { + LogUtils.initHiveExecLog4j(); + } catch (LogInitializationException e) { + System.err.println(e.getMessage()); } } diff --git ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java index c516f24..a783303 100644 --- ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java +++ ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java @@ -119,12 +119,10 @@ public class TestHiveHistory extends TestCase { try { // NOTE: It is critical to do this here so that log4j is reinitialized - // before - // any of the other core hive classes are loaded + // before any of the other core hive classes are loaded try { LogUtils.initHiveLog4j(); } catch (LogInitializationException e) { - // ignore } CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));