diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
index 02b2ab0..148f1f1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
@@ -21,6 +21,9 @@
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.channels.Channels;
+import java.nio.channels.FileChannel;
+import java.nio.channels.WritableByteChannel;
 import java.util.Map.Entry;
 
 import javax.servlet.http.HttpServletRequest;
@@ -236,17 +239,20 @@ public Response getLogs(@PathParam("containerid") String containerIdStr,
     try {
       final FileInputStream fis = ContainerLogsUtils.openLogFileForRead(
           containerIdStr, logFile, nmContext);
-
+      final FileChannel inputChannel = fis.getChannel();
       StreamingOutput stream = new StreamingOutput() {
         @Override
         public void write(OutputStream os) throws IOException,
            WebApplicationException {
+          WritableByteChannel outputChannel = Channels.newChannel(os);
          try {
-            int bufferSize = 65536;
-            byte[] buf = new byte[bufferSize];
-            int len;
-            while ((len = fis.read(buf, 0, bufferSize)) > 0) {
-              os.write(buf, 0, len);
+            long remaining = inputChannel.size();
+            long position = 0;
+            while (remaining > 0) {
+              long transferred =
+                  inputChannel.transferTo(position, remaining, outputChannel);
+              remaining -= transferred;
+              position += transferred;
            }
            os.flush();
          } finally {
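
For reference, a minimal standalone sketch of the FileChannel.transferTo() copy loop that this patch adopts, shown outside the NMWebServices context; the class name, method name, and file path below are hypothetical and only illustrate the pattern:

// Sketch only: stream a file into an OutputStream using transferTo(),
// looping because transferTo() may move fewer bytes than requested.
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;

public class TransferToSketch {
  static void copyFileTo(String path, OutputStream os) throws IOException {
    try (FileInputStream fis = new FileInputStream(path)) {
      FileChannel in = fis.getChannel();
      // Wrap the destination stream so the file channel can write to it.
      WritableByteChannel out = Channels.newChannel(os);
      long remaining = in.size();
      long position = 0;
      while (remaining > 0) {
        long transferred = in.transferTo(position, remaining, out);
        remaining -= transferred;
        position += transferred;
      }
      os.flush();
    }
  }
}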