Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1637520) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -1529,12 +1529,13 @@ // operation log configuration HIVE_SERVER2_LOGGING_OPERATION_ENABLED("hive.server2.logging.operation.enabled", true, - "When true, HS2 will save operation logs"), + "When true, HS2 will save operation logs and make them available for clients"), HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION("hive.server2.logging.operation.log.location", "${system:java.io.tmpdir}" + File.separator + "${system:user.name}" + File.separator + "operation_logs", "Top level directory where operation logs are stored if logging functionality is enabled"), - + HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false, + "When true, HS2 operation logs available for clients will be verbose"), // logging configuration HIVE_LOG4J_FILE("hive.log4j.file", "", "Hive log4j configuration file.\n" + Index: itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java =================================================================== --- itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (revision 1637520) +++ itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (working copy) @@ -529,7 +529,7 @@ public void testQueryProgress() throws Throwable { final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" + "select count(*) from " + tableName + ";\n"; - final String EXPECTED_PATTERN = "Parsing command"; + final String EXPECTED_PATTERN = "number of splits"; testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL())); } Index: itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
=================================================================== --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (revision 1637520) +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (working copy) @@ -51,6 +51,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.processors.DfsProcessor; @@ -105,6 +106,7 @@ public static void setUpBeforeClass() throws SQLException, ClassNotFoundException{ Class.forName(driverName); Connection con1 = getConnection("default"); + System.setProperty(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE.varname, "" + true); Statement stmt1 = con1.createStatement(); assertNotNull("Statement is null", stmt1); Index: itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java =================================================================== --- itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java (revision 1637520) +++ itests/hive-unit/src/test/java/org/apache/hive/service/cli/TestEmbeddedThriftBinaryCLIService.java (working copy) @@ -18,6 +18,7 @@ package org.apache.hive.service.cli; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import org.apache.hive.service.cli.thrift.ThriftCLIService; import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient; @@ -35,6 +36,7 @@ @BeforeClass public static void setUpBeforeClass() throws Exception { service = new EmbeddedThriftBinaryCLIService(); + service.init(new HiveConf()); client = new ThriftCLIServiceClient(service); } Index: jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java 
=================================================================== --- jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (revision 1637520) +++ jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (working copy) @@ -52,6 +52,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.auth.HiveAuthFactory; @@ -143,7 +144,9 @@ isEmbeddedMode = connParams.isEmbeddedMode(); if (isEmbeddedMode) { - client = new EmbeddedThriftBinaryCLIService(); + EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService(); + embeddedClient.init(new HiveConf()); + client = embeddedClient; } else { // extract user/password from JDBC connection properties if its not supplied in the // connection URL Index: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (revision 1637520) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (working copy) @@ -27,6 +27,7 @@ import java.util.Map; import java.util.Set; +import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.Context; @@ -170,7 +171,8 @@ counters = client.getDAGStatus(statusGetOpts).getDAGCounters(); TezSessionPoolManager.getInstance().returnSession(session); - if (LOG.isInfoEnabled() && counters != null) { + if (LOG.isInfoEnabled() && counters != null + && conf.getBoolVar(conf, HiveConf.ConfVars.TEZ_EXEC_SUMMARY)) { for (CounterGroup group: counters) { LOG.info(group.getDisplayName() +":"); for (TezCounter counter: group) { Index: service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java 
=================================================================== --- service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java (revision 1637520) +++ service/src/java/org/apache/hive/service/cli/operation/LogDivertAppender.java (working copy) @@ -18,7 +18,9 @@ package org.apache.hive.service.cli.operation; import java.io.CharArrayWriter; +import java.util.regex.Pattern; +import org.apache.hadoop.hive.ql.exec.Task; import org.apache.log4j.Layout; import org.apache.log4j.Logger; import org.apache.log4j.WriterAppender; @@ -25,6 +27,8 @@ import org.apache.log4j.spi.Filter; import org.apache.log4j.spi.LoggingEvent; +import com.google.common.base.Joiner; + /** * An Appender to divert logs from individual threads to the LogObject they belong to. */ @@ -33,20 +37,29 @@ private final OperationManager operationManager; /** - * A log filter that exclude messages coming from the logger with the given name. - * We apply this filter on the Loggers used by the log diversion stuff, so that + * A log filter that filters messages coming from the logger with the given names. + * It can be used as a white list filter or a black list filter. + * We apply black list filter on the Loggers used by the log diversion stuff, so that * they don't generate more logs for themselves when they process logs. 
+ * White list filter is used for less verbose log collection */ - private static class NameExclusionFilter extends Filter { - private String excludeLoggerName = null; + private static class NameFilter extends Filter { + private final Pattern namePattern; + private final boolean excludeMatches; - public NameExclusionFilter(String excludeLoggerName) { - this.excludeLoggerName = excludeLoggerName; + public NameFilter(boolean isExclusionFilter, String [] loggerNames) { + this.excludeMatches = isExclusionFilter; + String matchRegex = Joiner.on("|").join(loggerNames); + this.namePattern = Pattern.compile(matchRegex); } @Override public int decide(LoggingEvent ev) { - if (ev.getLoggerName().equals(excludeLoggerName)) { + boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches(); + if (excludeMatches == isMatch) { + // Deny if this is black-list filter (excludeMatches = true) and it + // matched + // or if this is whitelist filter and it didn't match return Filter.DENY; } return Filter.NEUTRAL; @@ -56,21 +69,30 @@ /** This is where the log message will go to */ private final CharArrayWriter writer = new CharArrayWriter(); - public LogDivertAppender(Layout layout, OperationManager operationManager) { + public LogDivertAppender(Layout layout, OperationManager operationManager, boolean isVerbose) { setLayout(layout); setWriter(writer); setName("LogDivertAppender"); this.operationManager = operationManager; - // Filter out messages coming from log processing classes, or we'll run an infinite loop. - addFilter(new NameExclusionFilter(LOG.getName())); - addFilter(new NameExclusionFilter(OperationLog.class.getName())); - addFilter(new NameExclusionFilter(OperationManager.class.getName())); + if (isVerbose) { + // Filter out messages coming from log processing classes, or we'll run an + // infinite loop. 
+ String[] exclLoggerNames = { LOG.getName(), OperationLog.class.getName(), + OperationManager.class.getName() }; + addFilter(new NameFilter(true, exclLoggerNames)); + } else { + // in non verbose mode, show only select logger messages + String[] inclLoggerNames = { "org.apache.hadoop.mapreduce.JobSubmitter", + "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName() + }; + addFilter(new NameFilter(false, inclLoggerNames)); + } } /** - * Overrides WriterAppender.subAppend(), which does the real logging. - * No need to worry about concurrency since log4j calls this synchronously. + * Overrides WriterAppender.subAppend(), which does the real logging. No need + * to worry about concurrency since log4j calls this synchronously. */ @Override protected void subAppend(LoggingEvent event) { Index: service/src/java/org/apache/hive/service/cli/operation/OperationManager.java =================================================================== --- service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (revision 1637520) +++ service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (working copy) @@ -18,8 +18,8 @@ package org.apache.hive.service.cli.operation; +import java.util.ArrayList; import java.util.Enumeration; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -30,9 +30,20 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hive.service.AbstractService; -import org.apache.hive.service.cli.*; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.RowSetFactory; +import 
org.apache.hive.service.cli.TableSchema; import org.apache.hive.service.cli.session.HiveSession; -import org.apache.log4j.*; +import org.apache.log4j.Appender; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Layout; +import org.apache.log4j.Logger; +import org.apache.log4j.PatternLayout; /** * OperationManager. @@ -39,7 +50,6 @@ * */ public class OperationManager extends AbstractService { - private static final String DEFAULT_LAYOUT_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"; private final Log LOG = LogFactory.getLog(OperationManager.class.getName()); private HiveConf hiveConf; @@ -54,7 +64,8 @@ public synchronized void init(HiveConf hiveConf) { this.hiveConf = hiveConf; if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) { - initOperationLogCapture(); + boolean isVerbose = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE); + initOperationLogCapture(isVerbose); } else { LOG.debug("Operation level logging is turned off"); } @@ -73,7 +84,7 @@ super.stop(); } - private void initOperationLogCapture() { + private void initOperationLogCapture(boolean isVerbose) { // There should be a ConsoleAppender. Copy its Layout. Logger root = Logger.getRootLogger(); Layout layout = null; @@ -87,13 +98,19 @@ } } - if (layout == null) { - layout = new PatternLayout(DEFAULT_LAYOUT_PATTERN); - LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern."); + final String VERBOSE_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n"; + final String NONVERBOSE_PATTERN = "%-5p : %m%n"; + + if (isVerbose) { + if (layout == null) { + layout = new PatternLayout(VERBOSE_PATTERN); + LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern."); + } + } else { + layout = new PatternLayout(NONVERBOSE_PATTERN); } - // Register another Appender (with the same layout) that talks to us. 
- Appender ap = new LogDivertAppender(layout, this); + Appender ap = new LogDivertAppender(layout, this, isVerbose); root.addAppender(ap); } Index: service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java =================================================================== --- service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java (revision 1637520) +++ service/src/java/org/apache/hive/service/cli/thrift/EmbeddedThriftBinaryCLIService.java (working copy) @@ -33,8 +33,13 @@ super(new CLIService(null)); isEmbedded = true; HiveConf.setLoadHiveServer2Config(true); - cliService.init(new HiveConf()); + } + + @Override + public synchronized void init(HiveConf hiveConf) { + cliService.init(hiveConf); cliService.start(); + super.init(hiveConf); } public ICLIService getService() { Index: service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java =================================================================== --- service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java (revision 1637520) +++ service/src/test/org/apache/hive/service/cli/operation/TestOperationLoggingAPI.java (working copy) @@ -17,29 +17,38 @@ */ package org.apache.hive.service.cli.operation; -import org.junit.Assert; +import java.io.File; + import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hive.service.cli.*; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.FetchType; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.OperationState; +import org.apache.hive.service.cli.OperationStatus; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.SessionHandle; import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService; import 
org.apache.hive.service.cli.thrift.ThriftCLIServiceClient; import org.junit.After; +import org.junit.Assert; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; -import java.io.File; - /** * TestOperationLoggingAPI * Test the FetchResults of TFetchType.LOG in thrift level. */ public class TestOperationLoggingAPI { - private HiveConf hiveConf = new HiveConf(); - private String tableName = "testOperationLoggingAPI_table"; + private static HiveConf hiveConf; + private final String tableName = "testOperationLoggingAPI_table"; private File dataFile; private ThriftCLIServiceClient client; private SessionHandle sessionHandle; - private String sql = "select * from " + tableName; - private String[] expectedLogs = { + private final String sql = "select * from " + tableName; + private final String[] expectedLogs = { "Parsing command", "Parse Completed", "Starting Semantic Analysis", @@ -47,6 +56,12 @@ "Starting command" }; + @BeforeClass + public static void setUpBeforeClass() { + hiveConf = new HiveConf(); + hiveConf.setBoolean(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE.varname, true); + } + /** * Start embedded mode, open a session, and create a table for cases usage * @throws Exception @@ -247,7 +262,7 @@ private void verifyFetchedLog(String logs) { for (String log : expectedLogs) { - Assert.assertTrue(logs.contains(log)); + Assert.assertTrue("Checking for presence of " + log, logs.contains(log)); } } }