.../hadoop/hive/ql/session/OperationLog.java | 118 +++++++++++----------
 1 file changed, 63 insertions(+), 55 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
index fc2800a..07d9b0e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/OperationLog.java
@@ -17,25 +17,32 @@
  */
 package org.apache.hadoop.hive.ql.session;
 
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.*;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * OperationLog wraps the actual operation log file, and provides interface
  * for accessing, reading, writing, and removing the file.
  */
 public class OperationLog {
-  private static final Logger LOG = LoggerFactory.getLogger(OperationLog.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(OperationLog.class);
 
   private final String operationName;
+  private final LogFile logFile;
 
   // If in test mode then the LogDivertAppenderForTest created an extra log file containing only
   // the output needed for the qfile results.
@@ -45,7 +52,8 @@ private final boolean isShortLogs;
   // True if the logs should be removed after the operation. Should be used only in test mode
   private final boolean isRemoveLogs;
-  private LoggingLevel opLoggingLevel = LoggingLevel.UNKNOWN;
+
+  private final LoggingLevel opLoggingLevel;
 
   public enum LoggingLevel {
     NONE, EXECUTION, PERFORMANCE, VERBOSE, UNKNOWN
   }
@@ -58,6 +66,8 @@ public OperationLog(String name, File file, HiveConf hiveConf) {
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
       String logLevel = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL);
       opLoggingLevel = getLoggingLevel(logLevel);
+    } else {
+      opLoggingLevel = LoggingLevel.UNKNOWN;
     }
 
     // If in test mod create a test log file which will contain only logs which are supposed to
@@ -79,15 +89,17 @@ public OperationLog(String name, File file, HiveConf hiveConf) {
   }
 
   public static LoggingLevel getLoggingLevel (String mode) {
-    if (mode.equalsIgnoreCase("none")) {
+    String m = org.apache.commons.lang.StringUtils.defaultString(mode).toLowerCase();
+    switch (m) {
+    case "none":
       return LoggingLevel.NONE;
-    } else if (mode.equalsIgnoreCase("execution")) {
+    case "execution":
       return LoggingLevel.EXECUTION;
-    } else if (mode.equalsIgnoreCase("verbose")) {
+    case "verbose":
       return LoggingLevel.VERBOSE;
-    } else if (mode.equalsIgnoreCase("performance")) {
+    case "performance":
       return LoggingLevel.PERFORMANCE;
-    } else {
+    default:
       return LoggingLevel.UNKNOWN;
     }
   }
@@ -105,11 +117,8 @@ public LoggingLevel getOpLoggingLevel() {
    */
   public List<String> readOperationLog(boolean isFetchFirst, long maxRows)
       throws SQLException {
-    if (isShortLogs) {
-      return testLogFile.read(isFetchFirst, maxRows);
-    } else {
-      return logFile.read(isFetchFirst, maxRows);
-    }
+    LogFile lf = isShortLogs ? testLogFile : logFile;
+    return lf.read(isFetchFirst, maxRows);
   }
 
   /**
@@ -144,8 +153,10 @@ public void close() {
     if (isFetchFirst) {
       resetIn();
     }
-
-    return readResults(maxRows);
+    if (maxRows > (long) Integer.MAX_VALUE) {
+      throw new SQLException("Cannot support loading this many rows: " + maxRows);
+    }
+    return readResults((int) maxRows);
   }
 
   /**
@@ -154,60 +165,57 @@ public void close() {
    */
   synchronized void close(boolean removeLog) {
     try {
-      if (in != null) {
-        in.close();
-      }
-      if (!isRemoved && removeLog && file.exists()) {
-        FileUtils.forceDelete(file);
+      resetIn();
+      if (removeLog && !isRemoved) {
+        if (file.exists()) {
+          FileUtils.forceDelete(file);
+        }
         isRemoved = true;
       }
-    } catch (Exception e) {
-      LOG.error("Failed to remove corresponding log file of operation: " + operationName, e);
+    } catch (IOException e) {
+      LOG.error("Failed to remove corresponding log file of operation: {}", operationName, e);
     }
   }
 
   private void resetIn() {
-    if (in != null) {
-      IOUtils.closeStream(in);
-      in = null;
-    }
+    IOUtils.closeStream(in);
+    in = null;
   }
 
-  private List<String> readResults(long nLines) throws SQLException {
-    List<String> logs = new ArrayList<String>();
+  private List<String> readResults(int nLines) throws SQLException {
+    List<String> logs = new ArrayList<>(64);
     if (in == null) {
       try {
         in = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
-        // Adding name of the log file in an extra log line, so it is easier to find
-        // the original if there is a test error
         if (isShortLogs) {
-          logs.add("Reading log file: " + file);
-          nLines--;
-        }
+          // Adding name of the log file in an extra log line, so it is easier to find
+          // the original if there is a test error
+          logs.add("Reading log file: " + file);
+        }
       } catch (FileNotFoundException e) {
-        return logs;
+        return Collections.emptyList();
       }
     }
 
-    String line = "";
-    // if nLines <= 0, read all lines in log file.
-    for (int i = 0; i < nLines || nLines <= 0; i++) {
-      try {
-        line = in.readLine();
-        if (line == null) {
-          break;
-        } else {
-          logs.add(line);
-        }
-      } catch (IOException e) {
-        if (isRemoved) {
-          throw new SQLException("The operation has been closed and its log file " +
-              file.getAbsolutePath() + " has been removed.", e);
-        } else {
-          throw new SQLException("Reading operation log file encountered an exception: ", e);
-        }
+    final int maxLines = (nLines <= 0) ? Integer.MAX_VALUE : nLines;
+    int currentLine = 0;
+
+    try {
+      // Check the count before reading so a line is never consumed and lost between fetches.
+      String line;
+      while (currentLine < maxLines && (line = in.readLine()) != null) {
+        logs.add(line);
+        currentLine++;
+      }
+    } catch (IOException e) {
+      if (isRemoved) {
+        throw new SQLException("The operation has been closed and its log file "
            + file.getAbsolutePath() + " has been removed", e);
+      } else {
+        throw new SQLException("Reading operation log file encountered an exception", e);
      }
    }
+
    return logs;
  }
 }
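
Reviewer note, not part of the patch: a minimal JUnit 4 sketch exercising the new null-safe getLoggingLevel() mapping introduced above. The test class name is hypothetical, and it assumes the patched OperationLog is on the test classpath.

import static org.junit.Assert.assertEquals;

import org.apache.hadoop.hive.ql.session.OperationLog;
import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
import org.junit.Test;

public class TestOperationLogLoggingLevel {

  @Test
  public void mapsKnownModesCaseInsensitively() {
    // The method lowercases the mode before the switch, so any casing maps to the same level.
    assertEquals(LoggingLevel.NONE, OperationLog.getLoggingLevel("NONE"));
    assertEquals(LoggingLevel.EXECUTION, OperationLog.getLoggingLevel("Execution"));
    assertEquals(LoggingLevel.PERFORMANCE, OperationLog.getLoggingLevel("performance"));
    assertEquals(LoggingLevel.VERBOSE, OperationLog.getLoggingLevel("verbose"));
  }

  @Test
  public void fallsBackToUnknownForNullOrUnrecognizedModes() {
    // StringUtils.defaultString(null) yields "", which falls through to the switch default.
    assertEquals(LoggingLevel.UNKNOWN, OperationLog.getLoggingLevel(null));
    assertEquals(LoggingLevel.UNKNOWN, OperationLog.getLoggingLevel("debug"));
  }
}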