diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
index 2e9e92d..53c76fa 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java
@@ -227,7 +227,7 @@ public void testFetchApplictionLogs() throws Exception {
     assertTrue(exitCode == 0);
     assertTrue(sysOutStream.toString().contains(
         "Hello container_0_0001_01_000001!"));
-    assertTrue(sysOutStream.toString().contains("LogUploadTime"));
+    assertTrue(sysOutStream.toString().contains("Log Upload Time"));
     fs.delete(new Path(remoteLogRootDir), true);
     fs.delete(new Path(rootLogDir), true);
 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
index 22219be..70830d3 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
@@ -233,9 +233,6 @@ public void write(DataOutputStream out, Set<File> pendingUploadFiles)
         // Write the logFile Type
         out.writeUTF(logFile.getName());
 
-        // Write the uploaded TimeStamp
-        out.writeLong(System.currentTimeMillis());
-
         // Write the log length as UTF so that it is printable
         out.writeUTF(String.valueOf(fileLength));
 
@@ -639,14 +636,14 @@ public ContainerLogsReader getContainerLogsReader(
      * Writes all logs for a single container to the provided writer.
      * @param valueStream
      * @param writer
+     * @param logUploadedTime
      * @throws IOException
      */
     public static void readAcontainerLogs(DataInputStream valueStream,
-        Writer writer) throws IOException {
+        Writer writer, long logUploadedTime) throws IOException {
       int bufferSize = 65536;
       char[] cbuf = new char[bufferSize];
       String fileType;
-      long uploadTime;
       String fileLengthStr;
       long fileLength;
 
@@ -657,13 +654,14 @@ public static void readAcontainerLogs(DataInputStream valueStream,
         // EndOfFile
        return;
       }
-      uploadTime = valueStream.readLong();
       fileLengthStr = valueStream.readUTF();
       fileLength = Long.parseLong(fileLengthStr);
       writer.write("\n\nLogType:");
       writer.write(fileType);
-      writer.write("\nLogUploadTime:");
-      writer.write(String.valueOf(uploadTime));
+      if (logUploadedTime != -1) {
+        writer.write("\nLog Upload Time:");
+        writer.write(Times.format(logUploadedTime));
+      }
       writer.write("\nLogLength:");
       writer.write(fileLengthStr);
       writer.write("\nLog Contents:\n");
@@ -681,27 +679,40 @@ public static void readAcontainerLogs(DataInputStream valueStream,
     }
 
     /**
+     * Writes all logs for a single container to the provided writer.
+     * @param valueStream
+     * @param writer
+     * @throws IOException
+     */
+    public static void readAcontainerLogs(DataInputStream valueStream,
+        Writer writer) throws IOException {
+      readAcontainerLogs(valueStream, writer, -1);
+    }
+
+    /**
      * Keep calling this till you get a {@link EOFException} for getting logs of
      * all types for a single container.
      *
     * @param valueStream
     * @param out
+    * @param logUploadedTime
     * @throws IOException
     */
    public static void readAContainerLogsForALogType(
-       DataInputStream valueStream, PrintStream out)
+       DataInputStream valueStream, PrintStream out, long logUploadedTime)
          throws IOException {
      byte[] buf = new byte[65535];
 
      String fileType = valueStream.readUTF();
-     long uploadTime = valueStream.readLong();
      String fileLengthStr = valueStream.readUTF();
      long fileLength = Long.parseLong(fileLengthStr);
      out.print("LogType: ");
      out.println(fileType);
-     out.print("LogUploadTime: ");
-     out.println(Times.format(uploadTime));
+     if (logUploadedTime != -1) {
+       out.print("Log Upload Time: ");
+       out.println(Times.format(logUploadedTime));
+     }
      out.print("LogLength: ");
      out.println(fileLengthStr);
      out.println("Log Contents:");
@@ -723,6 +734,20 @@ public static void readAContainerLogsForALogType(
       out.println("");
     }
 
+    /**
+     * Keep calling this till you get a {@link EOFException} for getting logs of
+     * all types for a single container.
+     *
+     * @param valueStream
+     * @param out
+     * @throws IOException
+     */
+    public static void readAContainerLogsForALogType(
+        DataInputStream valueStream, PrintStream out)
+        throws IOException {
+      readAContainerLogsForALogType(valueStream, out, -1);
+    }
+
     public void close() {
       IOUtils.cleanup(LOG, scanner, reader, fsDataIStream);
     }
@@ -732,7 +757,6 @@ public void close() {
   public static class ContainerLogsReader {
     private DataInputStream valueStream;
     private String currentLogType = null;
-    private long currentLogUpLoadTime = 0;
     private long currentLogLength = 0;
     private BoundedInputStream currentLogData = null;
     private InputStreamReader currentLogISR;
@@ -753,14 +777,12 @@ public String nextLog() throws IOException {
       }
 
       currentLogType = null;
-      currentLogUpLoadTime = 0;
       currentLogLength = 0;
       currentLogData = null;
       currentLogISR = null;
 
       try {
         String logType = valueStream.readUTF();
-        long logUpLoadTime = valueStream.readLong();
         String logLengthStr = valueStream.readUTF();
         currentLogLength = Long.parseLong(logLengthStr);
         currentLogData =
@@ -768,7 +790,6 @@ public String nextLog() throws IOException {
         currentLogData.setPropagateClose(false);
         currentLogISR = new InputStreamReader(currentLogData);
         currentLogType = logType;
-        currentLogUpLoadTime = logUpLoadTime;
       } catch (EOFException e) {
       }
 
@@ -779,10 +800,6 @@ public String getCurrentLogType() {
       return currentLogType;
     }
 
-    public long getCurrentLogUpLoadTime() {
-      return currentLogUpLoadTime;
-    }
-
     public long getCurrentLogLength() {
       return currentLogLength;
     }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java
index de06d48..daf0c22 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogCLIHelpers.java
@@ -78,7 +78,8 @@ public int dumpAContainersLogs(String appId, String containerId,
         reader =
             new AggregatedLogFormat.LogReader(getConf(), thisNodeFile.getPath());
-        if (dumpAContainerLogs(containerId, reader, System.out) > -1) {
+        if (dumpAContainerLogs(containerId, reader, System.out,
+            thisNodeFile.getModificationTime()) > -1) {
           foundContainerLogs = true;
         }
       } finally {
@@ -97,7 +98,8 @@
 
   @Private
   public int dumpAContainerLogs(String containerIdStr,
-      AggregatedLogFormat.LogReader reader, PrintStream out) throws IOException {
+      AggregatedLogFormat.LogReader reader, PrintStream out,
+      long logUploadedTime) throws IOException {
     DataInputStream valueStream;
     LogKey key = new LogKey();
     valueStream = reader.next(key);
@@ -114,7 +116,8 @@ public int dumpAContainerLogs(String containerIdStr,
 
     while (true) {
       try {
-        LogReader.readAContainerLogsForALogType(valueStream, out);
+        LogReader.readAContainerLogsForALogType(valueStream, out,
+            logUploadedTime);
       } catch (EOFException eof) {
         break;
       }
@@ -163,7 +166,8 @@ public int dumpAllContainersLogs(ApplicationId appId, String appOwner,
           out.println(StringUtils.repeat("=", containerString.length()));
           while (true) {
             try {
-              LogReader.readAContainerLogsForALogType(valueStream, out);
+              LogReader.readAContainerLogsForALogType(valueStream, out,
+                  thisNodeFile.getModificationTime());
               foundAnyLogs = true;
             } catch (EOFException eof) {
               break;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
index bba3258..102df92 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
@@ -126,6 +126,7 @@ protected void render(Block html) {
             .endsWith(LogAggregationUtils.TMP_FILE_SUFFIX)) {
           continue;
         }
+        long logUploadedTime = thisNodeFile.getModificationTime();
         reader =
             new AggregatedLogFormat.LogReader(conf, thisNodeFile.getPath());
 
@@ -164,7 +165,7 @@ protected void render(Block html) {
           }
 
           foundLog = readContainerLogs(html, logReader, logLimits,
-              desiredLogType);
+              desiredLogType, logUploadedTime);
         } catch (IOException ex) {
           LOG.error("Error getting logs for " + logEntity, ex);
           continue;
@@ -189,7 +190,7 @@ protected void render(Block html) {
 
   private boolean readContainerLogs(Block html,
       AggregatedLogFormat.ContainerLogsReader logReader, LogLimits logLimits,
-      String desiredLogType) throws IOException {
+      String desiredLogType, long logUpLoadTime) throws IOException {
     int bufferSize = 65536;
     char[] cbuf = new char[bufferSize];
 
@@ -199,13 +200,12 @@ private boolean readContainerLogs(Block html,
       if (desiredLogType == null || desiredLogType.isEmpty()
           || desiredLogType.equals(logType)) {
         long logLength = logReader.getCurrentLogLength();
-        long logUpLoadTime = logReader.getCurrentLogUpLoadTime();
         if (foundLog) {
           html.pre()._("\n\n")._();
         }
 
         html.p()._("Log Type: " + logType)._();
-        html.p()._("Log UpLoadTime: " + Times.format(logUpLoadTime))._();
+        html.p()._("Log UpLoad Time: " + Times.format(logUpLoadTime))._();
         html.p()._("Log Length: " + Long.toString(logLength))._();
 
         long start = logLimits.start < 0
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
index bc0485e..61ee349 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java
@@ -57,6 +57,7 @@
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
+import org.apache.hadoop.yarn.util.Times;
 import org.junit.After;
 import org.junit.Assume;
 import org.junit.Before;
@@ -233,7 +234,7 @@ public void testReadAcontainerLogs1() throws Exception {
     LogKey rLogKey = new LogKey();
     DataInputStream dis = logReader.next(rLogKey);
     Writer writer = new StringWriter();
-    LogReader.readAcontainerLogs(dis, writer);
+    LogReader.readAcontainerLogs(dis, writer, System.currentTimeMillis());
 
     // We should only do the log aggregation for stdout.
     // Since we could not open the fileInputStream for stderr, this file is not
@@ -241,8 +242,8 @@ public void testReadAcontainerLogs1() throws Exception {
     String s = writer.toString();
     int expectedLength = "\n\nLogType:stdout".length()
-        + ("\nLogUploadTime:" + System.currentTimeMillis()).length()
-        + ("\nLogLength:" + numChars).length()
+        + ("\nLog Upload Time:" + Times.format(System.currentTimeMillis()))
+        .length() + ("\nLogLength:" + numChars).length()
         + "\nLog Contents:\n".length() + numChars;
     Assert.assertTrue("LogType not matched", s.contains("LogType:stdout"));
     Assert.assertTrue("log file:stderr should not be aggregated.",
       !s.contains("LogType:stderr"));
@@ -256,7 +257,7 @@ public void testReadAcontainerLogs1() throws Exception {
     }
     String expectedContent = sb.toString();
     Assert.assertTrue("Log content incorrect", s.contains(expectedContent));
-    
+
     Assert.assertEquals(expectedLength, s.length());
   }
 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index cea71fa..977bb0a 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -767,22 +767,18 @@ private String verifyContainerLogs(LogAggregationService logAggregationService,
 
         Assert.assertEquals("LogType:", writtenLines[0].substring(0, 8));
         String fileType = writtenLines[0].substring(9);
 
-        Assert.assertEquals("LogUploadTime:", writtenLines[1].substring(0, 14));
-        String fileUploadedTimeStr = writtenLines[1].substring(15);
-
-        Assert.assertEquals("LogLength:", writtenLines[2].substring(0, 10));
-        String fileLengthStr = writtenLines[2].substring(11);
+        Assert.assertEquals("LogLength:", writtenLines[1].substring(0, 10));
+        String fileLengthStr = writtenLines[1].substring(11);
         long fileLength = Long.parseLong(fileLengthStr);
 
         Assert.assertEquals("Log Contents:",
-          writtenLines[3].substring(0, 13));
+          writtenLines[2].substring(0, 13));
         String logContents = StringUtils.join(
-          Arrays.copyOfRange(writtenLines, 4, writtenLines.length), "\n");
+          Arrays.copyOfRange(writtenLines, 3, writtenLines.length), "\n");
         perContainerMap.put(fileType, logContents);
 
         LOG.info("LogType:" + fileType);
-        LOG.info("LogUploadTime:" + fileUploadedTimeStr);
         LOG.info("LogLength:" + fileLength);
         LOG.info("Log Contents:\n" + perContainerMap.get(fileType));
       } catch (EOFException eof) {
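
A rough usage sketch follows (not part of the patch). After this change the upload time is no longer stored per log entry, so callers pass it in explicitly; LogCLIHelpers and AggregatedLogsBlock above use the aggregated node file's modification time. The wrapper class below and the names conf, nodeFile and containerIdStr are illustrative assumptions, not code from the patch.

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.PrintStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;

public class DumpOneContainerLogs {

  // Print every log type for one container from a single aggregated node
  // file, labelling it with the file's modification time as the upload time.
  static void dump(Configuration conf, FileStatus nodeFile,
      String containerIdStr, PrintStream out) throws Exception {
    LogReader reader = new LogReader(conf, nodeFile.getPath());
    try {
      // Walk the keys (container ids) until the requested container is found.
      LogKey key = new LogKey();
      DataInputStream valueStream = reader.next(key);
      while (valueStream != null && !key.toString().equals(containerIdStr)) {
        key = new LogKey();
        valueStream = reader.next(key);
      }
      if (valueStream == null) {
        return; // container not present in this node file
      }
      while (true) {
        try {
          // New overload added by this patch; the two-argument overload
          // (equivalent to passing -1) omits the "Log Upload Time" line.
          LogReader.readAContainerLogsForALogType(valueStream, out,
              nodeFile.getModificationTime());
        } catch (EOFException eof) {
          break; // no more log types for this container
        }
      }
    } finally {
      reader.close();
    }
  }
}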