diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
index a53ec57..2b0f13e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/pom.xml
@@ -40,6 +40,21 @@
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
+      <scope>test</scope>
+      <type>test-jar</type>
+      <version>${project.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/RmDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/RmDtFetcher.java
new file mode 100644
index 0000000..faaffc7
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/RmDtFetcher.java
@@ -0,0 +1,162 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+import javax.ws.rs.core.MediaType;
+
+import org.apache.commons.io.IOUtils;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.DtFetcher;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
+
+import com.sun.jersey.api.client.ClientResponse.Status;
+
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * DtFetcher implementation that fetches ResourceManager (RM) delegation
+ * tokens. The DtFetcher interface permits the abstraction and separation of
+ * delegation token fetch implementations across different packages and
+ * compilation units; the concrete fetcher implementation is resolved at
+ * runtime.
+ */
+public class RmDtFetcher implements DtFetcher {
+  private static final Logger LOG = LoggerFactory.getLogger(RmDtFetcher.class);
+
+  private static final String SERVICE_NAME = "rm";
+
+  /**
+   * Returns the service name, which is also a valid URL prefix.
+   */
+  public Text getServiceName() {
+    return new Text(SERVICE_NAME);
+  }
+
+  public boolean isTokenRequired() {
+    return UserGroupInformation.isSecurityEnabled();
+  }
+
+  private String getJSONTokenString(BufferedReader reader)
+      throws JSONException, IOException {
+    String line = reader.readLine();
+    while (line != null) {
+      JSONObject obj = new JSONObject(line);
+      if (obj.has("token")) {
+        return obj.getString("token");
+      }
+      line = reader.readLine();
+    }
+    return null;
+  }
+
+  private String processJSONTransaction(URL url,
+      HttpURLConnection conn,
+      String method,
+      String contentType,
+      String body) throws IOException {
+    // send request
+    conn.setRequestMethod(method);
+    conn.setDoOutput(true);
+    conn.setRequestProperty("Accept-Charset", "UTF8");
+    if (contentType != null && !contentType.isEmpty()) {
+      conn.setRequestProperty("Content-Type", contentType + ";charset=UTF8");
+      if (body != null && !body.isEmpty()) {
+        try (OutputStream stream = conn.getOutputStream()) {
+          stream.write(body.getBytes("UTF8"));
+        }
+      }
+    }
+    // process response
+    String token = null;
+    InputStream response = null;
+    BufferedReader reader = null;
+    try {
+      response = conn.getInputStream();
+      if (Status.OK.getStatusCode() != conn.getResponseCode()) {
+        throw new IOException("HTTP status from " + url
+            + " not OK: " + conn.getResponseCode());
+      }
+      reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
+      token = getJSONTokenString(reader);
+    } catch (JSONException jex) {
+      LOG.warn("JSON message from {} failed to parse", url, jex);
+    } finally {
+      IOUtils.closeQuietly(reader);
+      IOUtils.closeQuietly(response);
+    }
+    return token;
+  }
+
+  private Token<RMDelegationTokenIdentifier> getRMDelegationToken(
+      String renewer, URL url) throws IOException {
+    String body = String.format("{\"renewer\":\"%s\"}", renewer);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    String urlEncodedToken = processJSONTransaction(url,
+        conn, "POST", MediaType.APPLICATION_JSON, body);
+    if (urlEncodedToken == null) {
+      throw new IOException("Unable to fetch a valid token response from "
+          + url);
+    }
+    Token<RMDelegationTokenIdentifier> token = new Token<>();
+    token.decodeFromUrlString(urlEncodedToken);
+    return token;
+  }
+
+  /**
+   * Returns an RM delegation token fetched via the RM web services REST API.
+   * This implementation only supports the web app; the supplied URL should
+   * point to the host:port of the resource manager web app only.
+   *
+   * @param conf - used only to decide between http and https.
+   * @param creds - Credentials object to which the token will be added.
+   * @param renewer - String object holding the renewer.
+   * @param url - url to host:port, e.g. "foo.com:8088".
+   *              If url is null, "localhost:8088" is used.
+   * @return Token<RMDelegationTokenIdentifier>
+   * @throws Exception
+   */
+  public Token addDelegationTokens(Configuration conf, Credentials creds,
+      String renewer, String url) throws Exception {
+    if (url == null) {
+      url = "localhost:8088";
+    }
+    String schema = YarnConfiguration.useHttps(conf) ? "https" : "http";
+    Token<RMDelegationTokenIdentifier> token = getRMDelegationToken(
+        (renewer == null) ? "" : renewer,
+        new URL(schema + "://" + url + "/ws/v1/cluster/delegation-token"));
+    creds.addToken(token.getService(), token);
+    return token;
+  }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/TimelineDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/TimelineDtFetcher.java
new file mode 100644
index 0000000..64d4b36
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/util/TimelineDtFetcher.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import java.net.URI;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.DtFetcher;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
+
+/**
+ * DtFetcher implementation that fetches timeline server delegation tokens.
+ * The DtFetcher interface permits the abstraction and separation of
+ * delegation token fetch implementations across different packages and
+ * compilation units; the concrete fetcher implementation is resolved at
+ * runtime.
+ */
+public class TimelineDtFetcher implements DtFetcher {
+  private static final Log LOG = LogFactory.getLog(TimelineDtFetcher.class);
+
+  private static final String SERVICE_NAME = "timeline";
+
+  /**
+   * Returns the service name, which is also a valid URL prefix.
+   */
+  public Text getServiceName() {
+    return new Text(SERVICE_NAME);
+  }
+
+  public boolean isTokenRequired() {
+    return UserGroupInformation.isSecurityEnabled();
+  }
+
+  /**
+   * Returns a timeline delegation token obtained via TimelineClient.
+   * @param conf - Configuration object used to initialize the TimelineClient.
+   * @param creds - Credentials object to which the token will be added.
+   * @param renewer - String object holding the renewer.
+   * @param url - if non-null, overrides TIMELINE_SERVICE_WEBAPP_ADDRESS in
+   *              conf; e.g. timeline://localhost:8188
+   * @return TimelineDelegationToken for the given renewer.
+ * @throws Exception + */ + public Token addDelegationTokens(Configuration conf, Credentials creds, + String renewer, String url) throws Exception { + YarnConfiguration yarnConf = new YarnConfiguration(conf); + if (renewer == null) { + renewer = ""; + } + if (url != null) { + URI uri = new URI(url); + yarnConf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, + uri.getHost() + ":" + uri.getPort()); + } + Token token = null; + try(TimelineClient timelineClient = TimelineClient.createTimelineClient()) { + timelineClient.init(yarnConf); + timelineClient.start(); + token = timelineClient.getDelegationToken(renewer); + creds.addToken(token.getService(), token); + } + return token; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher new file mode 100644 index 0000000..4322e78 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +org.apache.hadoop.yarn.util.RmDtFetcher +org.apache.hadoop.yarn.util.TimelineDtFetcher diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestRmDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestRmDtFetcher.java new file mode 100644 index 0000000..2c76e71 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestRmDtFetcher.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.util; + +import java.io.File; +import java.util.concurrent.Callable; +import java.util.ServiceLoader; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.minikdc.MiniKdc; +import org.apache.hadoop.security.AuthenticationFilterInitializer; +import org.apache.hadoop.security.Credentials; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.KerberosTestUtils; +import org.apache.hadoop.security.authentication.server.AuthenticationFilter; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; +import org.apache.hadoop.security.token.DtFetcher; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier; +import org.apache.hadoop.yarn.server.resourcemanager.MockRM; +import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; +import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Test class for RmDtFetcher implementation. + */ +public class TestRmDtFetcher { + private static File kdcDir = new File("target", + TestRmDtFetcher.class.getName() + "-root"); + private static File keytabFile = new File(KerberosTestUtils.getKeytabFile()); + private static String keytabPath = keytabFile.getAbsolutePath(); + private static String principal = KerberosTestUtils.getServerPrincipal(); + private static MiniKdc kdc = null; + private static MockRM rm = null; + + @BeforeClass + public static void setUp() { + try { + setupKdc(); + setupRM(); + } catch (Exception e) { + assertTrue("Couldn't create MiniKdc", false); + } + } + + @AfterClass + public static void tearDown() { + if (kdc != null) { + kdc.stop(); + } + if (rm != null) { + rm.stop(); + } + } + + private static void setupRM() throws Exception { + Configuration rmconf = new Configuration(); + rmconf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, + YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS); + rmconf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, + ResourceScheduler.class); + rmconf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + String httpPrefix = "hadoop.http.authentication."; + rmconf.setStrings(httpPrefix + "type", "kerberos"); + rmconf.set(httpPrefix + KerberosAuthenticationHandler.PRINCIPAL, principal); + rmconf.set(httpPrefix + KerberosAuthenticationHandler.KEYTAB, keytabPath); + // use any file for signature secret + rmconf.set(httpPrefix + AuthenticationFilter.SIGNATURE_SECRET + ".file", + keytabPath); + rmconf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "kerberos"); + rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER, + true); + rmconf.set("hadoop.http.filter.initializers", + AuthenticationFilterInitializer.class.getName()); + rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); + rmconf.set(YarnConfiguration.RM_KEYTAB, keytabPath); + rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytabPath); + rmconf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, principal); + rmconf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, keytabPath); + 
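// Enable the MockRM web app so the /ws/v1/cluster/delegation-token REST endpoint used by RmDtFetcher is served.
+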
rmconf.setBoolean("mockrm.webapp.enabled", true); + rmconf.set("yarn.resourcemanager.proxyuser.client.hosts", "*"); + rmconf.set("yarn.resourcemanager.proxyuser.client.groups", "*"); + UserGroupInformation.setConfiguration(rmconf); + rm = new MockRM(rmconf); + rm.start(); + } + + private static void setupKdc() throws Exception { + kdc = new MiniKdc(MiniKdc.createConf(), kdcDir); + kdc.start(); + kdc.createPrincipal(keytabFile, "HTTP/localhost", "client", + UserGroupInformation.getLoginUser().getShortUserName(), "client2"); + } + + private static Void bodyForTest() throws Exception { + RmDtFetcher fetcher = loadRmDtFetcher(); + assertTrue("RM DtFetcher service must load", fetcher != null); + Credentials ts = new Credentials(); + assertEquals("Credentials has zero tokens", 0, ts.numberOfTokens()); + Token token = fetcher.addDelegationTokens(new Configuration(), ts, + UserGroupInformation.getLoginUser().getShortUserName(), null); + assertTrue("RM delegation token is not null", token != null); + assertEquals("RM kind is correct", + token.getKind(), RMDelegationTokenIdentifier.KIND_NAME); + assertEquals("Credentials has one token", 1, ts.numberOfTokens()); + return null; + } + + public static RmDtFetcher loadRmDtFetcher() { + ServiceLoader loader = ServiceLoader.load(DtFetcher.class); + for (DtFetcher fetcher : loader) { + Text serviceName = fetcher.getServiceName(); + if (serviceName.equals(new Text("rm"))) { + return (RmDtFetcher)fetcher; + } + } + return null; + } + + @Test + public void testRmDtFetcherFetchAsClient() throws Exception { + KerberosTestUtils.doAsClient(new Callable() { + @Override + public Void call() throws Exception { + return bodyForTest(); + } + }); + } + + @Test + public void testRmDtFetcherFetchAsServer() throws Exception { + KerberosTestUtils.doAs("HTTP/localhost", new Callable() { + @Override + public Void call() throws Exception { + return bodyForTest(); + } + }); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestTimelineDtFetcher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestTimelineDtFetcher.java new file mode 100644 index 0000000..6c1a88a --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/util/TestTimelineDtFetcher.java @@ -0,0 +1,189 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.util; + +import java.io.File; +import java.util.Arrays; +import java.util.Collection; +import java.util.concurrent.Callable; +import java.util.ServiceLoader; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.fs.FileUtil; +import org.apache.hadoop.http.HttpConfig; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.minikdc.MiniKdc; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.KerberosTestUtils; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; +import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.security.Credentials; +import org.apache.hadoop.security.token.DtFetcher; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer; +import org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore; +import org.apache.hadoop.yarn.server.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertEquals; + +/** + * Test class for TimelineDtFetcher. + */ +@RunWith(Parameterized.class) +public class TestTimelineDtFetcher { + public static final Text SERVICE_NAME = new Text("timeline"); + private static final String HTTP_USER = "HTTP"; + private static final String BASE_DIR = System.getProperty("test.build.dir", + "target/test-dir") + "/" + TestTimelineDtFetcher.class.getSimpleName(); + + private static File keytabFile = new File(KerberosTestUtils.getKeytabFile()); + private static String principal = KerberosTestUtils.getServerPrincipal(); + + private static MiniKdc kdc; + private static String keystoresDir; + private static String sslConfDir; + private static ApplicationHistoryServer timelineServer; + private static Configuration conf; + private static boolean withSsl; + + public TestTimelineDtFetcher(boolean withSsl) { + TestTimelineDtFetcher.withSsl = withSsl; + } + + @Parameterized.Parameters + public static Collection withSsl() { + return Arrays.asList(new Object[][] {{false}, {true}}); + } + + @BeforeClass + public static void setup() { + try { + kdc = new MiniKdc(MiniKdc.createConf(), new File(BASE_DIR, + TestTimelineDtFetcher.class.getName() + "-kdc-root")); + kdc.start(); + kdc.createPrincipal(keytabFile, HTTP_USER + "/localhost"); + } catch (Exception e) { + assertTrue("Couldn't setup MiniKdc", false); + } + + try { + timelineServer = new ApplicationHistoryServer(); + conf = new Configuration(false); + conf.setStrings(TimelineAuthenticationFilterInitializer.PREFIX + "type", + "kerberos"); + conf.set(TimelineAuthenticationFilterInitializer.PREFIX + + KerberosAuthenticationHandler.PRINCIPAL, principal); + conf.set(TimelineAuthenticationFilterInitializer.PREFIX + + KerberosAuthenticationHandler.KEYTAB, keytabFile.getAbsolutePath()); + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "kerberos"); + conf.set(YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL, principal); + 
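// This keytab was created for the HTTP/localhost principal by the MiniKdc started earlier in setup().
+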
conf.set(YarnConfiguration.TIMELINE_SERVICE_KEYTAB, + keytabFile.getAbsolutePath()); + conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); + conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE, + MemoryTimelineStore.class, TimelineStore.class); + conf.set(YarnConfiguration.TIMELINE_SERVICE_ADDRESS, + "localhost:10200"); + conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, + "localhost:8188"); + conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS, + "localhost:8190"); + conf.set("hadoop.proxyuser.HTTP.hosts", "*"); + conf.setInt(YarnConfiguration.TIMELINE_SERVICE_CLIENT_MAX_RETRIES, 1); + + if (withSsl) { + conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY, + HttpConfig.Policy.HTTPS_ONLY.name()); + File base = new File(BASE_DIR); + FileUtil.fullyDelete(base); + base.mkdirs(); + keystoresDir = new File(BASE_DIR).getAbsolutePath(); + sslConfDir = + KeyStoreTestUtil.getClasspathDir(TestTimelineDtFetcher.class); + KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false); + } + + UserGroupInformation.setConfiguration(conf); + timelineServer.init(conf); + timelineServer.start(); + } catch (Exception e) { + assertTrue("Couldn't setup TimelineServer", false); + } + } + + @AfterClass + public static void tearDown() throws Exception { + if (kdc != null) { + kdc.stop(); + } + + if (timelineServer != null) { + timelineServer.stop(); + } + + if (withSsl) { + KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir); + File base = new File(BASE_DIR); + FileUtil.fullyDelete(base); + } + } + + public static TimelineDtFetcher loadTimelineDtFetcher() { + ServiceLoader loader = ServiceLoader.load(DtFetcher.class); + for (DtFetcher fetcher : loader) { + Text serviceName = fetcher.getServiceName(); + if (serviceName.equals(SERVICE_NAME)) { + return (TimelineDtFetcher)fetcher; + } + } + return null; + } + + @Test + public void testTimelineDtFetcherFetch() throws Exception { + KerberosTestUtils.doAs(HTTP_USER + "/localhost", new Callable() { + @Override + public Void call() throws Exception { + TimelineDtFetcher fetcher = loadTimelineDtFetcher(); + assertTrue("Timeline DtFetcher service must load", fetcher != null); + Credentials ts = new Credentials(); + assertEquals("Credentials has zero tokens", ts.numberOfTokens(), 0); + Token token = fetcher.addDelegationTokens(conf, ts, + UserGroupInformation.getLoginUser().getShortUserName(), null); + assertTrue("Timeline delegation token is null", token != null); + assertEquals("Timeline kind is not correct", + token.getKind(), TimelineDelegationTokenIdentifier.KIND_NAME); + assertEquals("Credentials has one token", ts.numberOfTokens(), 1); + return null; + } + }); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher new file mode 100644 index 0000000..4322e78 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher @@ -0,0 +1,17 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +org.apache.hadoop.yarn.util.RmDtFetcher +org.apache.hadoop.yarn.util.TimelineDtFetcher
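
For reference, the META-INF/services files above register both fetchers with java.util.ServiceLoader, so a caller resolves a fetcher by its service name ("rm" or "timeline") at runtime, exactly as the two tests do. A minimal sketch of that lookup follows; the DtFetcherLookup class and its fetch method are illustrative names only and are not part of this patch.

    import java.util.ServiceLoader;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;
    import org.apache.hadoop.security.token.DtFetcher;
    import org.apache.hadoop.security.token.Token;

    public class DtFetcherLookup {
      /** Resolve the registered fetcher for a service name and fetch a token. */
      public static Token<?> fetch(Configuration conf, String service,
          String renewer, String url) throws Exception {
        Credentials creds = new Credentials();
        for (DtFetcher fetcher : ServiceLoader.load(DtFetcher.class)) {
          if (fetcher.getServiceName().equals(new Text(service))) {
            // RmDtFetcher talks to the RM REST endpoint; TimelineDtFetcher
            // goes through TimelineClient. Both add the token to creds.
            return fetcher.addDelegationTokens(conf, creds, renewer, url);
          }
        }
        return null;   // no fetcher registered under that service name
      }
    }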