diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java
index 93a2d04..3436d03 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SpillRecord.java
@@ -34,6 +34,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.util.PureJavaCrc32;
 
 @InterfaceAudience.LimitedPrivate({"MapReduce"})
@@ -65,7 +66,10 @@ public SpillRecord(Path indexFileName, JobConf job, Checksum crc,
       throws IOException {
 
     final FileSystem rfs = FileSystem.getLocal(job).getRaw();
-    final FSDataInputStream in = rfs.open(indexFileName);
+    final FSDataInputStream in =
+        SecureIOUtils.openFSDataInputStream(rfs, indexFileName,
+            expectedIndexOwner, null);
+
     try {
       final long length = rfs.getFileStatus(indexFileName).getLen();
       final int partitions = (int) length / MAP_OUTPUT_INDEX_RECORD_LENGTH;
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
index 0beb430..9eab57c 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
@@ -58,6 +58,7 @@
 import org.apache.hadoop.io.DataInputByteBuffer;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.ReadaheadPool;
+import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
 import org.apache.hadoop.security.ssl.SSLFactory;
@@ -572,7 +573,7 @@ protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch,
     final File spillfile = new File(mapOutputFileName.toString());
     RandomAccessFile spill;
     try {
-      spill = new RandomAccessFile(spillfile, "r");
+      spill = SecureIOUtils.openForRandomRead(spillfile, "r", user, null);
     } catch (FileNotFoundException e) {
       LOG.info(spillfile + " not found");
       return null;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
index 5fdd957..3ea9827 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
@@ -39,8 +39,8 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -52,8 +52,8 @@
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch;
 import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
 import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.PRE;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -238,7 +238,8 @@ private void printLogs(Block html, ContainerId containerId,
       }
       // TODO: Use secure IO Utils to avoid symlink attacks.
       // TODO Fix findBugs close warning along with IOUtils change
-      logByteStream = new FileInputStream(logFile);
+      logByteStream =
+          SecureIOUtils.openForRead(logFile, application.getUser(), null);
       IOUtils.skipFully(logByteStream, start);
 
       InputStreamReader reader = new InputStreamReader(logByteStream);
@@ -260,8 +261,11 @@ private void printLogs(Block html, ContainerId containerId,
         reader.close();
 
       } catch (IOException e) {
-        html.h1("Exception reading log-file. Log file was likely aggregated. "
-            + StringUtils.stringifyException(e));
+        LOG.error(
+            "Exception reading log file " + logFile.getAbsolutePath(), e);
+        html.h1("Exception reading log file. It might be because the log"
+            + " file was aggregated or the user does not have permission"
+            + " to read it: " + logFile.getAbsolutePath());
       } finally {
         if (logByteStream != null) {
           try {
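
Note on the pattern above: all three hunks replace a plain open of a node-local file with a SecureIOUtils call that checks the file's owner against the expected user (the job owner) before any bytes are served, so a malicious task cannot drop a symlink into its own directory and have the NodeManager or ShuffleHandler read another user's files on its behalf. Below is a minimal sketch of that open-then-verify idea; the helper class is hypothetical and is not the actual SecureIOUtils implementation, which, when native code is available, fstats the already-open descriptor rather than re-stating the path, closing the remaining check/open race in this sketch.

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.attribute.PosixFileAttributes;

    // Hypothetical illustration of an owner check on open, in the spirit of
    // SecureIOUtils.openForRead(file, expectedOwner, expectedGroup).
    class SecureOpenSketch {
      static FileInputStream openForRead(File f, String expectedOwner)
          throws IOException {
        FileInputStream in = new FileInputStream(f);
        boolean ok = false;
        try {
          // Stat the path and compare owners; the stream is closed and an
          // IOException thrown if the file belongs to an unexpected user.
          PosixFileAttributes attrs =
              Files.readAttributes(f.toPath(), PosixFileAttributes.class);
          if (expectedOwner != null
              && !expectedOwner.equals(attrs.owner().getName())) {
            throw new IOException("Owner '" + attrs.owner().getName()
                + "' for path " + f + " did not match expected owner '"
                + expectedOwner + "'");
          }
          ok = true;
          return in;
        } finally {
          if (!ok) {
            in.close();  // never hand back a stream that failed the check
          }
        }
      }
    }

Passing null as the last argument in the patched calls skips the group check, so only the expected owner is enforced.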