diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
index 1f2088a..5aa4671 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JHAdminConfig.java
@@ -225,6 +225,19 @@
+ "jobname.limit";
public static final int DEFAULT_MR_HS_JOBNAME_LIMIT = 50;
+
+ /**
+ * CSRF settings.
+ */
+ public static final String MR_HISTORY_CSRF_PREFIX = MR_HISTORY_PREFIX +
+ "webapp.rest-csrf.";
+ public static final String MR_HISTORY_CSRF_ENABLED = MR_HISTORY_CSRF_PREFIX +
+ "enabled";
+ public static final String MR_HISTORY_CSRF_CUSTOM_HEADER =
+ MR_HISTORY_CSRF_PREFIX + "custom-header";
+ public static final String MR_HISTORY_METHODS_TO_IGNORE =
+ MR_HISTORY_CSRF_PREFIX + "methods-to-ignore";
+
/**
* Settings for .jhist file format.
*/
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
index dc5c3dd..1a5f731 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
@@ -1852,4 +1852,30 @@
default is -1
+<property>
+  <description>
+    Enable the CSRF filter for the job history web app
+  </description>
+  <name>mapreduce.jobhistory.webapp.rest-csrf.enabled</name>
+  <value>false</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the custom header name to use for CSRF
+    protection.
+  </description>
+  <name>mapreduce.jobhistory.webapp.rest-csrf.custom-header</name>
+  <value>X-XSRF-Header</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the list of HTTP methods that do not
+    require CSRF protection
+  </description>
+  <name>mapreduce.jobhistory.webapp.rest-csrf.methods-to-ignore</name>
+  <value>GET,OPTIONS,HEAD</value>
+</property>
+
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
index 3751ad9..2fbaade 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
@@ -160,6 +160,7 @@ protected void initializeWebApp(Configuration conf) {
JHAdminConfig.MR_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
.withHttpSpnegoPrincipalKey(
JHAdminConfig.MR_WEBAPP_SPNEGO_USER_NAME_KEY)
+ .withCSRFProtection(JHAdminConfig.MR_HISTORY_CSRF_PREFIX)
.at(NetUtils.getHostPortString(bindAddress)).start(webApp);
String connectHost = MRWebAppUtil.getJHSWebappURLWithoutScheme(conf).split(":")[0];
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index cef6932..61d1d72 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2399,6 +2399,30 @@ public static boolean areNodeLabelsEnabled(
public static final String NM_SCRIPT_BASED_NODE_LABELS_PROVIDER_SCRIPT_OPTS =
NM_SCRIPT_BASED_NODE_LABELS_PROVIDER_PREFIX + "opts";
+ // RM and NM CSRF props
+ public static final String REST_CSRF = "webapp.rest-csrf.";
+ public static final String RM_CSRF_PREFIX = RM_PREFIX + REST_CSRF;
+ public static final String NM_CSRF_PREFIX = NM_PREFIX + REST_CSRF;
+ public static final String TIMELINE_CSRF_PREFIX = TIMELINE_SERVICE_PREFIX +
+ REST_CSRF;
+ public static final String RM_CSRF_ENABLED = RM_CSRF_PREFIX + "enabled";
+ public static final String NM_CSRF_ENABLED = NM_CSRF_PREFIX + "enabled";
+ public static final String TIMELINE_CSRF_ENABLED = TIMELINE_CSRF_PREFIX +
+ "enabled";
+ public static final String RM_CSRF_CUSTOM_HEADER = RM_CSRF_PREFIX +
+ "custom-header";
+ public static final String NM_CSRF_CUSTOM_HEADER = NM_CSRF_PREFIX +
+ "custom-header";
+ public static final String TIMELINE_CSRF_CUSTOM_HEADER =
+ TIMELINE_CSRF_PREFIX + "custom-header";
+ public static final String RM_CSRF_METHODS_TO_IGNORE = RM_CSRF_PREFIX +
+ "methods-to-ignore";
+ public static final String NM_CSRF_METHODS_TO_IGNORE = NM_CSRF_PREFIX +
+ "methods-to-ignore";
+ public static final String TIMELINE_CSRF_METHODS_TO_IGNORE =
+ TIMELINE_CSRF_PREFIX + "methods-to-ignore";
+
+
public YarnConfiguration() {
super();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
index 0c6edad..6144a0d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
@@ -39,6 +39,7 @@
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import org.slf4j.Logger;
@@ -73,6 +74,7 @@
public class WebApps {
static final Logger LOG = LoggerFactory.getLogger(WebApps.class);
public static class Builder {
+
static class ServletStruct {
public Class<? extends HttpServlet> clazz;
public String name;
@@ -91,6 +93,7 @@
boolean devMode = false;
private String spnegoPrincipalKey;
private String spnegoKeytabKey;
+ private String configPrefix;
private final HashSet<ServletStruct> servlets = new HashSet<ServletStruct>();
private final HashMap<String, Object> attributes = new HashMap<String, Object>();
@@ -161,6 +164,18 @@
return this;
}
+ /**
+ * Enable the CSRF filter.
+ * @param csrfConfigPrefix The config prefix that identifies the
+ * CSRF parameters applicable for this filter
+ * instance.
+ * @return the Builder instance
+ */
+ public Builder withCSRFProtection(String csrfConfigPrefix) {
+ this.configPrefix = csrfConfigPrefix;
+ return this;
+ }
+
public Builder inDevMode() {
devMode = true;
return this;
@@ -266,6 +281,19 @@ public void setup() {
for(Map.Entry<String, Object> entry : attributes.entrySet()) {
server.setAttribute(entry.getKey(), entry.getValue());
}
+ Map<String, String> params = getCsrfConfigParameters();
+
+ if (hasCSRFEnabled(params)) {
+ LOG.info("CSRF Protection has been enabled for the {} application. "
+ + "Please ensure that there is an authentication mechanism "
+ + "enabled (kerberos, custom, etc).",
+ name);
+ String restCsrfClassName = RestCsrfPreventionFilter.class.getName();
+ HttpServer2.defineFilter(server.getWebAppContext(), restCsrfClassName,
+ restCsrfClassName, params,
+ new String[] {"/*"});
+ }
+
HttpServer2.defineFilter(server.getWebAppContext(), "guice",
GuiceFilter.class.getName(), null, new String[] { "/*" });
@@ -295,6 +323,20 @@ protected void configure() {
return webapp;
}
+ private boolean hasCSRFEnabled(Map<String, String> params) {
+ return params != null && Boolean.valueOf(params.get("enabled"));
+ }
+
+ private Map<String, String> getCsrfConfigParameters() {
+ Map<String, String> params = null;
+ if (configPrefix != null) {
+ // need to obtain parameters for CSRF filter
+ params =
+ RestCsrfPreventionFilter.getFilterParams(conf, configPrefix);
+ }
+ return params;
+ }
+
public WebApp start() {
return start(null);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index cc08802..ea1afe4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2637,4 +2637,82 @@
yarn.node-labels.fs-store.impl.class
org.apache.hadoop.yarn.nodelabels.FileSystemNodeLabelsStore
+
+<property>
+  <description>
+    Enable the CSRF filter for the RM web app
+  </description>
+  <name>yarn.resourcemanager.webapp.rest-csrf.enabled</name>
+  <value>false</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the custom header name to use for CSRF
+    protection.
+  </description>
+  <name>yarn.resourcemanager.webapp.rest-csrf.custom-header</name>
+  <value>X-XSRF-Header</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the list of HTTP methods that do not
+    require CSRF protection
+  </description>
+  <name>yarn.resourcemanager.webapp.rest-csrf.methods-to-ignore</name>
+  <value>GET,OPTIONS,HEAD</value>
+</property>
+
+<property>
+  <description>
+    Enable the CSRF filter for the NM web app
+  </description>
+  <name>yarn.nodemanager.webapp.rest-csrf.enabled</name>
+  <value>false</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the custom header name to use for CSRF
+    protection.
+  </description>
+  <name>yarn.nodemanager.webapp.rest-csrf.custom-header</name>
+  <value>X-XSRF-Header</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the list of HTTP methods that do not
+    require CSRF protection
+  </description>
+  <name>yarn.nodemanager.webapp.rest-csrf.methods-to-ignore</name>
+  <value>GET,OPTIONS,HEAD</value>
+</property>
+
+<property>
+  <description>
+    Enable the CSRF filter for the timeline service web app
+  </description>
+  <name>yarn.timeline-service.webapp.rest-csrf.enabled</name>
+  <value>false</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the custom header name to use for CSRF
+    protection.
+  </description>
+  <name>yarn.timeline-service.webapp.rest-csrf.custom-header</name>
+  <value>X-XSRF-Header</value>
+</property>
+
+<property>
+  <description>
+    Optional parameter that indicates the list of HTTP methods that do not
+    require CSRF protection
+  </description>
+  <name>yarn.timeline-service.webapp.rest-csrf.methods-to-ignore</name>
+  <value>GET,OPTIONS,HEAD</value>
+</property>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index f4fe140..10841b7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -69,7 +69,7 @@
public static final int SHUTDOWN_HOOK_PRIORITY = 30;
private static final Log LOG = LogFactory
- .getLog(ApplicationHistoryServer.class);
+ .getLog(ApplicationHistoryServer.class);
private ApplicationHistoryClientService ahsClientService;
private ApplicationACLsManager aclsManager;
@@ -89,7 +89,8 @@ protected void serviceInit(Configuration conf) throws Exception {
// init timeline services first
timelineStore = createTimelineStore(conf);
addIfService(timelineStore);
- secretManagerService = createTimelineDelegationTokenSecretManagerService(conf);
+ secretManagerService =
+ createTimelineDelegationTokenSecretManagerService(conf);
addService(secretManagerService);
timelineDataManager = createTimelineDataManager(conf);
addService(timelineDataManager);
@@ -113,7 +114,7 @@ protected void serviceInit(Configuration conf) throws Exception {
protected void serviceStart() throws Exception {
try {
doSecureLogin(getConfig());
- } catch(IOException ie) {
+ } catch (IOException ie) {
throw new YarnRuntimeException("Failed to login", ie);
}
@@ -131,8 +132,7 @@ protected void serviceStop() throws Exception {
}
@Private
- @VisibleForTesting
- ApplicationHistoryClientService getClientService() {
+ @VisibleForTesting ApplicationHistoryClientService getClientService() {
return this.ahsClientService;
}
@@ -156,22 +156,21 @@ public TimelineStore getTimelineStore() {
}
@Private
- @VisibleForTesting
- ApplicationHistoryManager getApplicationHistoryManager() {
+ @VisibleForTesting ApplicationHistoryManager getApplicationHistoryManager() {
return this.historyManager;
}
static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
Thread
- .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
+ .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args,
- LOG);
+ LOG);
ApplicationHistoryServer appHistoryServer = null;
try {
appHistoryServer = new ApplicationHistoryServer();
ShutdownHookManager.get().addShutdownHook(
- new CompositeServiceShutdownHook(appHistoryServer),
- SHUTDOWN_HOOK_PRIORITY);
+ new CompositeServiceShutdownHook(appHistoryServer),
+ SHUTDOWN_HOOK_PRIORITY);
YarnConfiguration conf = new YarnConfiguration();
new GenericOptionsParser(conf, args);
appHistoryServer.init(conf);
@@ -188,8 +187,8 @@ public static void main(String[] args) {
}
private ApplicationHistoryClientService
- createApplicationHistoryClientService(
- ApplicationHistoryManager historyManager) {
+ createApplicationHistoryClientService(
+ ApplicationHistoryManager historyManager) {
return new ApplicationHistoryClientService(historyManager);
}
@@ -223,7 +222,7 @@ private TimelineStore createTimelineStore(
}
private TimelineDelegationTokenSecretManagerService
- createTimelineDelegationTokenSecretManagerService(Configuration conf) {
+ createTimelineDelegationTokenSecretManagerService(Configuration conf) {
return new TimelineDelegationTokenSecretManagerService();
}
@@ -250,15 +249,16 @@ private void startWebApp() {
initializers == null || initializers.length() == 0 ? "" : initializers;
if (!initializers.contains(CrossOriginFilterInitializer.class.getName())) {
- if(conf.getBoolean(YarnConfiguration
+ if (conf.getBoolean(YarnConfiguration
.TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED, YarnConfiguration
- .TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) {
- if (initializers.contains(HttpCrossOriginFilterInitializer.class.getName())) {
+ .TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) {
+ if (initializers
+ .contains(HttpCrossOriginFilterInitializer.class.getName())) {
initializers =
- initializers.replaceAll(HttpCrossOriginFilterInitializer.class.getName(),
- CrossOriginFilterInitializer.class.getName());
- }
- else {
+ initializers
+ .replaceAll(HttpCrossOriginFilterInitializer.class.getName(),
+ CrossOriginFilterInitializer.class.getName());
+ } else {
if (initializers.length() != 0) {
initializers += ",";
}
@@ -269,7 +269,7 @@ private void startWebApp() {
}
if (!initializers.contains(TimelineAuthenticationFilterInitializer.class
- .getName())) {
+ .getName())) {
if (initializers.length() != 0) {
initializers += ",";
}
@@ -282,7 +282,7 @@ private void startWebApp() {
for (String filterInitializer : parts) {
filterInitializer = filterInitializer.trim();
if (filterInitializer.equals(AuthenticationFilterInitializer.class
- .getName())) {
+ .getName())) {
modifiedInitializers = true;
continue;
}
@@ -294,48 +294,54 @@ private void startWebApp() {
conf.set("hadoop.http.filter.initializers", actualInitializers);
}
String bindAddress = WebAppUtils.getWebAppBindURL(conf,
- YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
- WebAppUtils.getAHSWebAppURLWithoutScheme(conf));
+ YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
+ WebAppUtils.getAHSWebAppURLWithoutScheme(conf));
try {
- AHSWebApp ahsWebApp = new AHSWebApp(timelineDataManager, ahsClientService);
+ AHSWebApp ahsWebApp =
+ new AHSWebApp(timelineDataManager, ahsClientService);
webApp =
WebApps
- .$for("applicationhistory", ApplicationHistoryClientService.class,
- ahsClientService, "ws")
- .with(conf).withAttribute(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
- conf.get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS)).at(bindAddress).build(ahsWebApp);
- HttpServer2 httpServer = webApp.httpServer();
-
- String[] names = conf.getTrimmedStrings(YarnConfiguration.TIMELINE_SERVICE_UI_NAMES);
- WebAppContext webAppContext = httpServer.getWebAppContext();
-
- for (String name : names) {
- String webPath = conf.get(
- YarnConfiguration.TIMELINE_SERVICE_UI_WEB_PATH_PREFIX + name);
- String onDiskPath = conf.get(
- YarnConfiguration.TIMELINE_SERVICE_UI_ON_DISK_PATH_PREFIX + name);
- WebAppContext uiWebAppContext = new WebAppContext();
- uiWebAppContext.setContextPath(webPath);
- uiWebAppContext.setWar(onDiskPath);
- final String[] ALL_URLS = { "/*" };
- FilterHolder[] filterHolders =
- webAppContext.getServletHandler().getFilters();
- for (FilterHolder filterHolder: filterHolders) {
- if (!"guice".equals(filterHolder.getName())) {
- HttpServer2.defineFilter(uiWebAppContext, filterHolder.getName(),
- filterHolder.getClassName(), filterHolder.getInitParameters(),
- ALL_URLS);
- }
- }
- LOG.info("Hosting " + name + " from " + onDiskPath + " at " + webPath);
- httpServer.addContext(uiWebAppContext, true);
- }
- httpServer.start();
- conf.updateConnectAddr(YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
- YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
- this.getListenerAddress());
- LOG.info("Instantiating AHSWebApp at " + getPort());
+ .$for("applicationhistory", ApplicationHistoryClientService.class,
+ ahsClientService, "ws")
+ .with(conf)
+ .withAttribute(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
+ conf.get(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS))
+ .withCSRFProtection(YarnConfiguration.TIMELINE_CSRF_PREFIX)
+ .at(bindAddress).build(ahsWebApp);
+ HttpServer2 httpServer = webApp.httpServer();
+
+ String[] names =
+ conf.getTrimmedStrings(YarnConfiguration.TIMELINE_SERVICE_UI_NAMES);
+ WebAppContext webAppContext = httpServer.getWebAppContext();
+
+ for (String name : names) {
+ String webPath = conf.get(
+ YarnConfiguration.TIMELINE_SERVICE_UI_WEB_PATH_PREFIX + name);
+ String onDiskPath = conf.get(
+ YarnConfiguration.TIMELINE_SERVICE_UI_ON_DISK_PATH_PREFIX + name);
+ WebAppContext uiWebAppContext = new WebAppContext();
+ uiWebAppContext.setContextPath(webPath);
+ uiWebAppContext.setWar(onDiskPath);
+ final String[] ALL_URLS = { "/*" };
+ FilterHolder[] filterHolders =
+ webAppContext.getServletHandler().getFilters();
+ for (FilterHolder filterHolder : filterHolders) {
+ if (!"guice".equals(filterHolder.getName())) {
+ HttpServer2.defineFilter(uiWebAppContext, filterHolder.getName(),
+ filterHolder.getClassName(), filterHolder.getInitParameters(),
+ ALL_URLS);
+ }
+ }
+ LOG.info("Hosting " + name + " from " + onDiskPath + " at " + webPath);
+ httpServer.addContext(uiWebAppContext, true);
+ }
+ httpServer.start();
+ conf.updateConnectAddr(
+ YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
+ YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
+ YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS,
+ this.getListenerAddress());
+ LOG.info("Instantiating AHSWebApp at " + getPort());
} catch (Exception e) {
String msg = "AHSWebApp failed to start.";
LOG.error(msg, e);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
index 319c10c..827e1b5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
@@ -79,6 +79,7 @@ protected void serviceStart() throws Exception {
YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY)
.withHttpSpnegoKeytabKey(
YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
+ .withCSRFProtection(YarnConfiguration.NM_CSRF_PREFIX)
.start(this.nmWebApp);
this.port = this.webApp.httpServer().getConnectorAddress(0).getPort();
} catch (Exception e) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
index 80b33a3..2744bb4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
@@ -1058,6 +1058,7 @@ protected void startWepApp() {
YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY)
.withHttpSpnegoKeytabKey(
YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
+ .withCSRFProtection(YarnConfiguration.RM_CSRF_PREFIX)
.at(webAppAddress);
String proxyHostAndPort = WebAppUtils.getProxyHostAndPort(conf);
if(WebAppUtils.getResolvedRMWebAppURLWithoutScheme(conf).
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/webapp/TestRMWithCSRFFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/webapp/TestRMWithCSRFFilter.java
new file mode 100644
index 0000000..2efbd2d
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/webapp/TestRMWithCSRFFilter.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.webapp;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
+import org.apache.hadoop.service.Service.STATE;
+import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
+import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
+import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebServices;
+import org.apache.hadoop.yarn.util.YarnVersionInfo;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import java.io.StringReader;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Used TestRMWebServices as an example of web invocations of RM and added
+ * test for CSRF Filter.
+ */
+public class TestRMWithCSRFFilter extends JerseyTestBase {
+
+ private static MockRM rm;
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+ bind(JAXBContextResolver.class);
+ bind(RMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ Configuration conf = new Configuration();
+ conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
+ ResourceScheduler.class);
+ rm = new MockRM(conf);
+ bind(ResourceManager.class).toInstance(rm);
+ serve("/*").with(GuiceContainer.class);
+ RestCsrfPreventionFilter csrfFilter = new RestCsrfPreventionFilter();
+ Map<String, String> initParams = new HashMap<>();
+ // adding GET as protected method to make things a little easier...
+ initParams.put(RestCsrfPreventionFilter.CUSTOM_METHODS_TO_IGNORE_PARAM,
+ "OPTIONS,HEAD,TRACE");
+ filter("/*").through(csrfFilter, initParams);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ }
+
+ public TestRMWithCSRFFilter() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.yarn.server.resourcemanager.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testNoCustomHeaderFromBrowser() throws Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("cluster")
+ .path("info").accept("application/xml")
+ .header(RestCsrfPreventionFilter.HEADER_USER_AGENT,"Mozilla/5.0")
+ .get(ClientResponse.class);
+ assertTrue("Should have been rejected", response.getStatus() ==
+ Status.BAD_REQUEST.getStatusCode());
+ }
+
+ @Test
+ public void testIncludeCustomHeaderFromBrowser() throws Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("cluster")
+ .path("info").accept("application/xml")
+ .header(RestCsrfPreventionFilter.HEADER_USER_AGENT,"Mozilla/5.0")
+ .header("X-XSRF-HEADER", "")
+ .get(ClientResponse.class);
+ assertTrue("Should have been accepted", response.getStatus() ==
+ Status.OK.getStatusCode());
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ verifyClusterInfoXML(xml);
+ }
+
+ @Test
+ public void testAllowedMethod() throws Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("cluster")
+ .path("info").accept("application/xml")
+ .header(RestCsrfPreventionFilter.HEADER_USER_AGENT,"Mozilla/5.0")
+ .head();
+ assertTrue("Should have been allowed", response.getStatus() ==
+ Status.OK.getStatusCode());
+ }
+
+ @Test
+ public void testAllowNonBrowserInteractionWithoutHeader() throws Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("cluster")
+ .path("info").accept("application/xml")
+ .get(ClientResponse.class);
+ assertTrue("Should have been accepted", response.getStatus() ==
+ Status.OK.getStatusCode());
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ verifyClusterInfoXML(xml);
+ }
+
+ public void verifyClusterInfoXML(String xml) throws Exception {
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("clusterInfo");
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ verifyClusterGeneric(WebServicesTestUtils.getXmlLong(element, "id"),
+ WebServicesTestUtils.getXmlLong(element, "startedOn"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlString(element, "haState"),
+ WebServicesTestUtils.getXmlString(
+ element, "haZooKeeperConnectionState"),
+ WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
+ WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
+ WebServicesTestUtils.getXmlString(element, "hadoopVersion"),
+ WebServicesTestUtils.getXmlString(element,
+ "resourceManagerVersionBuiltOn"),
+ WebServicesTestUtils.getXmlString(element,
+ "resourceManagerBuildVersion"),
+ WebServicesTestUtils.getXmlString(element, "resourceManagerVersion"));
+ }
+ }
+
+ public void verifyClusterGeneric(long clusterid, long startedon,
+ String state, String haState,
+ String haZooKeeperConnectionState,
+ String hadoopVersionBuiltOn,
+ String hadoopBuildVersion,
+ String hadoopVersion,
+ String resourceManagerVersionBuiltOn,
+ String resourceManagerBuildVersion,
+ String resourceManagerVersion) {
+
+ assertEquals("clusterId doesn't match: ",
+ ResourceManager.getClusterTimeStamp(), clusterid);
+ assertEquals("startedOn doesn't match: ",
+ ResourceManager.getClusterTimeStamp(), startedon);
+ assertTrue("stated doesn't match: " + state,
+ state.matches(STATE.INITED.toString()));
+ assertTrue("HA state doesn't match: " + haState,
+ haState.matches("INITIALIZING"));
+
+ WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
+ VersionInfo.getDate(), hadoopVersionBuiltOn);
+ WebServicesTestUtils.checkStringEqual("hadoopBuildVersion",
+ VersionInfo.getBuildVersion(), hadoopBuildVersion);
+ WebServicesTestUtils.checkStringMatch("hadoopVersion",
+ VersionInfo.getVersion(), hadoopVersion);
+
+ WebServicesTestUtils.checkStringMatch("resourceManagerVersionBuiltOn",
+ YarnVersionInfo.getDate(),
+ resourceManagerVersionBuiltOn);
+ WebServicesTestUtils.checkStringEqual("resourceManagerBuildVersion",
+ YarnVersionInfo.getBuildVersion(), resourceManagerBuildVersion);
+ WebServicesTestUtils.checkStringMatch("resourceManagerVersion",
+ YarnVersionInfo.getVersion(),
+ resourceManagerVersion);
+ }
+
+}