diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
index d6ff6af..c5a59ad 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
@@ -38,6 +38,21 @@
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/RmDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/RmDtFetcher.java
new file mode 100644
index 0000000..e0791ed
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/RmDtFetcher.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+import javax.ws.rs.core.MediaType;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.DtFetcher;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
+
+import com.sun.jersey.api.client.ClientResponse.Status;
+
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+/**
+ * DtFetcher implementation for the YARN ResourceManager. DtFetcher is an
+ * interface which permits the abstraction and separation of delegation token
+ * fetch implementations across different packages and compilation units;
+ * the concrete fetcher implementation is resolved at runtime via
+ * java.util.ServiceLoader.
+ */
+public class RmDtFetcher implements DtFetcher {
+ private static final Log LOG = LogFactory.getLog(RmDtFetcher.class);
+
+ private static final String SERVICE_NAME = "rm";
+
+ /**
+ * Returns the service name, which is also a valid URL prefix.
+ */
+ public Text getServiceName() {
+ return new Text(SERVICE_NAME);
+ }
+
+ public boolean isTokenRequired() {
+ return UserGroupInformation.isSecurityEnabled();
+ }
+
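+  // Reads the response line by line and returns the value of the "token"
+  // field from the first JSON object that contains one. Illustrative
+  // response shape (additional fields may also be present):
+  //   {"token":"<URL-encoded delegation token string>", ...}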
+ private String getJSONTokenString(BufferedReader reader)
+ throws JSONException, IOException {
+ String line = reader.readLine();
+ while (line != null) {
+ JSONObject obj = new JSONObject(line);
+ if (obj.has("token")) {
+ return obj.getString("token");
+ }
+ line = reader.readLine();
+ }
+ return null;
+ }
+
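+  // Sends the HTTP request (writing the JSON body when one is supplied) and
+  // scans the JSON response for a "token" field; returns null if no token
+  // could be parsed out of the response.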
+ private String processJSONTransaction(HttpURLConnection conn, String method,
+ String contentType, String body) throws IOException {
+ // send request
+ conn.setRequestMethod(method);
+ conn.setDoOutput(true);
+ conn.setRequestProperty("Accept-Charset", "UTF8");
+ if (contentType != null && !contentType.isEmpty()) {
+ conn.setRequestProperty("Content-Type", contentType + ";charset=UTF8");
+ if (body != null && !body.isEmpty()) {
+ OutputStream stream = conn.getOutputStream();
+ stream.write(body.getBytes("UTF8"));
+ stream.close();
+ }
+ }
+ // process response
+ String token = null;
+ InputStream response = null;
+ BufferedReader reader = null;
+ try {
+ response = conn.getInputStream();
+ if (Status.OK.getStatusCode() != conn.getResponseCode()) {
+ throw new IOException("HTTP status not OK: " + conn.getResponseCode());
+ }
+ reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
+ token = getJSONTokenString(reader);
+ } catch (JSONException jex) {
+ LOG.warn("JSON message failed to parse: " + jex.getMessage());
+ } finally {
+ IOUtils.closeQuietly(reader);
+ IOUtils.closeQuietly(response);
+ }
+ return token;
+ }
+
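+  // POSTs a {"renewer":"..."} JSON body to the RM delegation-token REST
+  // endpoint and decodes the URL-safe token string from the response.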
+  private Token<RMDelegationTokenIdentifier> getRMDelegationToken(
+      String renewer, URL url) throws IOException {
+    String body = String.format("{\"renewer\":\"%s\"}", renewer);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    String urlEncodedToken = processJSONTransaction(
+        conn, "POST", MediaType.APPLICATION_JSON, body);
+    if (urlEncodedToken == null) {
+      throw new IOException("Not able to fetch a valid token response.");
+    }
+    Token<RMDelegationTokenIdentifier> token =
+        new Token<RMDelegationTokenIdentifier>();
+    token.decodeFromUrlString(urlEncodedToken);
+    return token;
+  }
+
+ /**
+ * Returns RMDelegationToken object via REST API.
+ * This implementation only supports the web app. The supplied URL should
+ * point to the host:port of the resource manager web app only.
+ *
+ * @param conf - ignored.
+ * @param creds - Credentials object to which token(s) will be added.
+ * @param renewer - String object holding the renewer.
+ * @param url - url to host:port, e.g. "foo.com:8088".
+ * If url is null, "localhost:8088" is used.
+ * @return Token<RMDelegationTokenIdentifier>
+ * @throws Exception
+ */
+  public Token<?> addDelegationTokens(Configuration conf, Credentials creds,
+      String renewer, String url) throws Exception {
+    if (url == null) {
+      url = "localhost:8088";
+    }
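+    // The RM web services expose delegation tokens under
+    // /ws/v1/cluster/delegation-token; only plain HTTP is attempted here.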
+    Token<RMDelegationTokenIdentifier> token = getRMDelegationToken(
+        (renewer == null) ? "" : renewer,
+        new URL("http://" + url + "/ws/v1/cluster/delegation-token"));
+ creds.addToken(token.getService(), token);
+ return token;
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/TimelineDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/TimelineDtFetcher.java
new file mode 100644
index 0000000..4c979e5
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/TimelineDtFetcher.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.net.URI;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.DtFetcher;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
+
+/**
+ * DtFetcher implementation for the YARN Timeline Server. DtFetcher is an
+ * interface which permits the abstraction and separation of delegation token
+ * fetch implementations across different packages and compilation units;
+ * the concrete fetcher implementation is resolved at runtime via
+ * java.util.ServiceLoader.
+ */
+public class TimelineDtFetcher implements DtFetcher {
+ private static final Log LOG = LogFactory.getLog(TimelineDtFetcher.class);
+
+ private static final String SERVICE_NAME = "timeline";
+
+ /**
+ * Returns the service name, which is also a valid URL prefix.
+ */
+ public Text getServiceName() {
+ return new Text(SERVICE_NAME);
+ }
+
+ public boolean isTokenRequired() {
+ return UserGroupInformation.isSecurityEnabled();
+ }
+
+ /**
+ * Returns TimelineDelegationToken object via TimelineClient.
+ * @param conf - Configuration object used to init TimelineClient.
+ * @param creds - Credentials object to which token(s) will be added.
+ * @param renewer - String object holding the renewer.
+ * @param url - String url overrides TIMELINE_SERVICE_WEBAPP_ADDRESS in conf.
+ * Example timeline DT URL: timeline://localhost:8188
+ * @return TimelineDelegationToken with renewer.
+ * @throws Exception
+ */
+  public Token<?> addDelegationTokens(Configuration conf, Credentials creds,
+      String renewer, String url) throws Exception {
+ YarnConfiguration yarnConf = new YarnConfiguration(conf);
+ if (renewer == null) {
+ renewer = "";
+ }
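+    // A url of the form timeline://host:port overrides the timeline web app
+    // address taken from the configuration.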
+ if (url != null) {
+ URI uri = new URI(url);
+ yarnConf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
+ uri.getHost() + ":" + uri.getPort());
+ }
+    Token<TimelineDelegationTokenIdentifier> token = null;
+    try (TimelineClient timelineClient =
+        TimelineClient.createTimelineClient()) {
+ timelineClient.init(yarnConf);
+ timelineClient.start();
+ token = timelineClient.getDelegationToken(renewer);
+ creds.addToken(token.getService(), token);
+ }
+ return token;
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher
new file mode 100644
index 0000000..4322e78
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
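+# The DtFetcher implementations below are discovered at runtime via
+# java.util.ServiceLoader, e.g. by the hadoop dtutil command in hadoop-common.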
+org.apache.hadoop.yarn.util.RmDtFetcher
+org.apache.hadoop.yarn.util.TimelineDtFetcher
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestRmDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestRmDtFetcher.java
new file mode 100644
index 0000000..407bcca
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestRmDtFetcher.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.File;
+import java.util.concurrent.Callable;
+import java.util.ServiceLoader;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.KerberosTestUtils;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.token.DtFetcher;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
+import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test class for RmDtFetcher implementation.
+ */
+public class TestRmDtFetcher {
+ private static File kdcDir = new File("target",
+ TestRmDtFetcher.class.getName() + "-root");
+ private static File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+ private static String keytabPath = keytabFile.getAbsolutePath();
+ private static String principal = KerberosTestUtils.getServerPrincipal();
+ private static MiniKdc kdc = null;
+ private static MockRM rm = null;
+
+ @BeforeClass
+ public static void setUp() {
+ try {
+ setupKdc();
+ setupRM();
+ } catch (Exception e) {
+ assertTrue("Couldn't create MiniKdc", false);
+ }
+ }
+
+ @AfterClass
+ public static void tearDown() {
+ if (kdc != null) {
+ kdc.stop();
+ }
+ if (rm != null) {
+ rm.stop();
+ }
+ }
+
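+  // Starts a MockRM whose web app is protected by the Kerberos/SPNEGO
+  // authentication filter and has the delegation-token auth filter enabled,
+  // so the fetcher's REST call runs against an authenticated endpoint.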
+ private static void setupRM() throws Exception {
+ Configuration rmconf = new Configuration();
+ rmconf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
+ YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
+ rmconf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
+ ResourceScheduler.class);
+ rmconf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
+ String httpPrefix = "hadoop.http.authentication.";
+ rmconf.setStrings(httpPrefix + "type", "kerberos");
+ rmconf.set(httpPrefix + KerberosAuthenticationHandler.PRINCIPAL, principal);
+ rmconf.set(httpPrefix + KerberosAuthenticationHandler.KEYTAB, keytabPath);
+ // use any file for signature secret
+ rmconf.set(httpPrefix + AuthenticationFilter.SIGNATURE_SECRET + ".file",
+ keytabPath);
+ rmconf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
+ true);
+ rmconf.set("hadoop.http.filter.initializers",
+ AuthenticationFilterInitializer.class.getName());
+ rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal);
+ rmconf.set(YarnConfiguration.RM_KEYTAB, keytabPath);
+ rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytabPath);
+ rmconf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal);
+ rmconf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytabPath);
+ rmconf.setBoolean("mockrm.webapp.enabled", true);
+ rmconf.set("yarn.resourcemanager.proxyuser.client.hosts", "*");
+ rmconf.set("yarn.resourcemanager.proxyuser.client.groups", "*");
+ UserGroupInformation.setConfiguration(rmconf);
+ rm = new MockRM(rmconf);
+ rm.start();
+ }
+
+ private static void setupKdc() throws Exception {
+ kdc = new MiniKdc(MiniKdc.createConf(), kdcDir);
+ kdc.start();
+ kdc.createPrincipal(keytabFile, "HTTP/localhost", "client",
+ UserGroupInformation.getLoginUser().getShortUserName(), "client2");
+ }
+
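+  // Resolves the RM fetcher through its ServiceLoader registration (see the
+  // META-INF/services entry added in this patch) and verifies that exactly
+  // one RM delegation token is added to the Credentials.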
+ private static Void bodyForTest() throws Exception {
+ RmDtFetcher fetcher = loadRmDtFetcher();
+ assertTrue("RM DtFetcher service must load", fetcher != null);
+ Credentials ts = new Credentials();
+ assertEquals("Credentials has zero tokens", 0, ts.numberOfTokens());
+ Token> token = fetcher.addDelegationTokens(new Configuration(), ts,
+ UserGroupInformation.getLoginUser().getShortUserName(), null);
+ assertTrue("RM delegation token is not null", token != null);
+ assertEquals("RM kind is correct",
+ token.getKind(), RMDelegationTokenIdentifier.KIND_NAME);
+ assertEquals("Credentials has one token", 1, ts.numberOfTokens());
+ return null;
+ }
+
+ public static RmDtFetcher loadRmDtFetcher() {
+    ServiceLoader<DtFetcher> loader = ServiceLoader.load(DtFetcher.class);
+ for (DtFetcher fetcher : loader) {
+ Text serviceName = fetcher.getServiceName();
+ if (serviceName.equals(new Text("rm"))) {
+ return (RmDtFetcher)fetcher;
+ }
+ }
+ return null;
+ }
+
+ @Test
+ public void testRmDtFetcherFetchAsClient() throws Exception {
+    KerberosTestUtils.doAsClient(new Callable<Void>() {
+ @Override
+ public Void call() throws Exception {
+ return bodyForTest();
+ }
+ });
+ }
+
+ @Test
+ public void testRmDtFetcherFetchAsServer() throws Exception {
+    KerberosTestUtils.doAs("HTTP/localhost", new Callable<Void>() {
+ @Override
+ public Void call() throws Exception {
+ return bodyForTest();
+ }
+ });
+ }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestTimelineDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestTimelineDtFetcher.java
new file mode 100644
index 0000000..4185e3c
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestTimelineDtFetcher.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.concurrent.Callable;
+import java.util.ServiceLoader;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.KerberosTestUtils;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.token.DtFetcher;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer;
+import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore;
+import org.apache.hadoop.yarn.server.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test class for TimelineDtFetcher.
+ */
+@RunWith(Parameterized.class)
+public class TestTimelineDtFetcher {
+ public static final Text SERVICE_NAME = new Text("timeline");
+ private static final String HTTP_USER = "HTTP";
+ private static final String BASE_DIR = System.getProperty("test.build.dir",
+ "target/test-dir") + "/" + TestTimelineDtFetcher.class.getSimpleName();
+
+ private static File keytabFile = new File(KerberosTestUtils.getKeytabFile());
+ private static String principal = KerberosTestUtils.getServerPrincipal();
+
+ private static MiniKdc kdc;
+ private static String keystoresDir;
+ private static String sslConfDir;
+ private static ApplicationHistoryServer timelineServer;
+ private static Configuration conf;
+ private static boolean withSsl;
+
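+  // Parameterized so the timeline server is exercised both with and without
+  // SSL on the web app transport.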
+ public TestTimelineDtFetcher(boolean withSsl) {
+ TestTimelineDtFetcher.withSsl = withSsl;
+ }
+
+ @Parameterized.Parameters
+ public static Collection