diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java index bf18470..d7e5b67 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/SliderAppMaster.java @@ -137,7 +137,6 @@ import org.apache.slider.server.appmaster.rpc.RpcBinder; import org.apache.slider.server.appmaster.rpc.SliderClusterProtocolPBImpl; import org.apache.slider.server.appmaster.rpc.SliderIPCService; -import org.apache.slider.server.appmaster.security.SecurityConfiguration; import org.apache.slider.server.appmaster.state.AppState; import org.apache.slider.server.appmaster.state.AppStateBindingInfo; import org.apache.slider.server.appmaster.state.ContainerAssignment; @@ -287,7 +286,6 @@ /** Application Attempt Id ( combination of attemptId and fail count )*/ private ApplicationAttemptId appAttemptID; - private Configuration configuration; /** * App ACLs @@ -487,7 +485,6 @@ public synchronized void serviceInit(Configuration conf) throws Exception { timelineServiceEnabled = true; log.info("Enabled YARN timeline service v2. "); } - this.configuration = conf; //init all child services super.serviceInit(conf); } @@ -889,7 +886,7 @@ private SliderFileSystem doSecureLogin(String appName, SliderFileSystem fs, File keytab = new File(SliderKeys.KEYTAB_DIR + "/" + principal); if (!keytab.exists()) { String keytabPreInstalled = - configuration.get(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH); + getConfig().get(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH); if (!StringUtils.isEmpty(keytabPreInstalled)) { keytab = new File(keytabPreInstalled); log.info("Using pre-installed keytab at: " + keytabPreInstalled); @@ -900,7 +897,7 @@ private SliderFileSystem doSecureLogin(String appName, SliderFileSystem fs, return fs; } if (StringUtils.isEmpty(principal)) { - principal = SecurityConfiguration.getPrincipal(configuration); + principal = getPrincipal(getConfig()); log.info("Login with principal: " + principal); } login(principal, keytab); @@ -910,6 +907,18 @@ private SliderFileSystem doSecureLogin(String appName, SliderFileSystem fs, } + + public static String getPrincipal(Configuration conf) throws IOException { + String principal = conf.get( + SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL); + if (SliderUtils.isUnset(principal)) { + principal = UserGroupInformation.getLoginUser().getShortUserName(); + log.info("No principal set in the slider configuration. 
Will use AM " + + "login identity {} to attempt keytab-based login", principal); + } + return principal; + } + /** * Get the YARN application Attempt report as the logged in user * @param yarnClient client to the RM diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/security/SecurityConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/security/SecurityConfiguration.java deleted file mode 100644 index f48d650..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/security/SecurityConfiguration.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.slider.server.appmaster.security; - -import com.google.common.base.Preconditions; -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; -import static org.apache.slider.core.main.LauncherExitCodes.EXIT_UNAUTHORIZED; - -import org.apache.slider.api.resource.Application; -import org.apache.slider.common.SliderKeys; -import org.apache.slider.common.SliderXmlConfKeys; -import org.apache.slider.common.tools.SliderUtils; -import org.apache.slider.core.exceptions.SliderException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; - -/** - * Class keeping code security information - */ -public class SecurityConfiguration { - - protected static final Logger log = - LoggerFactory.getLogger(SecurityConfiguration.class); - private final Configuration configuration; - private final Application application; - private String clusterName; - - public SecurityConfiguration(Configuration configuration, - Application application, - String clusterName) throws SliderException { - Preconditions.checkNotNull(configuration); - Preconditions.checkNotNull(application); - Preconditions.checkNotNull(clusterName); - this.configuration = configuration; - this.application = application; - this.clusterName = clusterName; - validate(); - } - - private void validate() throws SliderException { - if (isSecurityEnabled()) { - // TODO use AM configuration rather than app configuration - String principal = application.getConfiguration().getProperty( - SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL); - if(SliderUtils.isUnset(principal)) { - // if no login identity is available, fail - UserGroupInformation loginUser = null; - try { - loginUser = getLoginUser(); - } catch (IOException e) { - throw new 
SliderException(EXIT_UNAUTHORIZED, e, - "No principal configured for the application and " - + "exception raised during retrieval of login user. " - + "Unable to proceed with application " - + "initialization. Please ensure a value " - + "for %s exists in the application " - + "configuration or the login issue is addressed", - SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL); - } - if (loginUser == null) { - throw new SliderException(EXIT_UNAUTHORIZED, - "No principal configured for the application " - + "and no login user found. " - + "Unable to proceed with application " - + "initialization. Please ensure a value " - + "for %s exists in the application " - + "configuration or the login issue is addressed", - SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL); - } - } - // ensure that either local or distributed keytab mechanism is enabled, - // but not both - String keytabFullPath = application.getConfiguration().getProperty( - SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH); - String keytabName = application.getConfiguration().getProperty( - SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME); - if (SliderUtils.isSet(keytabFullPath) && SliderUtils.isSet(keytabName)) { - throw new SliderException(EXIT_UNAUTHORIZED, - "Both a keytab on the cluster host (%s) and a" - + " keytab to be retrieved from HDFS (%s) are" - + " specified. Please configure only one keytab" - + " retrieval mechanism.", - SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH, - SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME); - - } - } - } - - protected UserGroupInformation getLoginUser() throws IOException { - return UserGroupInformation.getLoginUser(); - } - - public boolean isSecurityEnabled() { - return SliderUtils.isHadoopClusterSecure(configuration); - } - - public static String getPrincipal(Configuration conf) throws IOException { - String principal = conf.get( - SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL); - if (SliderUtils.isUnset(principal)) { - principal = UserGroupInformation.getLoginUser().getShortUserName(); - log.info("No principal set in the slider configuration. Will use AM " + - "login identity {} to attempt keytab-based login", principal); - } - - return principal; - } - - public static boolean isKeytabProvided(Configuration configuration) { - String keytabLocalPath = configuration.get( - SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH); - String keytabName = configuration.get( - SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME); - return StringUtils.isNotBlank(keytabLocalPath) - || StringUtils.isNotBlank(keytabName); - - } - - public File getKeytabFile() - throws SliderException, IOException { - //TODO implement this for dash semantic - String keytabFullPath = application.getConfiguration().getProperty( - SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH); - File localKeytabFile; - if (SliderUtils.isUnset(keytabFullPath)) { - // get the keytab - String keytabName = application.getConfiguration().getProperty( - SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME); - log.info("No host keytab file path specified. 
Will attempt to retrieve" - + " keytab file {} as a local resource for the container", - keytabName); - // download keytab to local, protected directory - localKeytabFile = new File(SliderKeys.KEYTAB_DIR, keytabName); - } else { - log.info("Using host keytab file {} for login", keytabFullPath); - localKeytabFile = new File(keytabFullPath); - } - return localKeytabFile; - } - -} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java deleted file mode 100644 index 5354415..0000000 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.slider.server.appmaster.security; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.slider.api.resource.Application; -import org.apache.slider.common.SliderKeys; -import org.apache.slider.common.SliderXmlConfKeys; -import org.apache.slider.core.exceptions.SliderException; -import org.junit.Test; - -import java.io.File; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -/** - * Test security configuration. 
- */ -public class TestSecurityConfiguration { - - @Test - public void testValidLocalConfiguration() throws Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL, "test"); - compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH, - "/some/local/path"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster"); - } - - @Test - public void testValidDistributedConfiguration() throws Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL, "test"); - compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster"); - } - - @Test - public void testMissingPrincipalNoLoginWithDistributedConfig() throws - Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - try { - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster") { - @Override - protected UserGroupInformation getLoginUser() throws - IOException { - return null; - } - }; - fail("expected SliderException"); - } catch (SliderException e) { - // expected - } - } - - @Test - public void testMissingPrincipalNoLoginWithLocalConfig() throws Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH, - "/some/local/path"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - try { - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster") { - @Override - protected UserGroupInformation getLoginUser() throws IOException { - return null; - } - }; - fail("expected SliderException"); - } catch (SliderException e) { - // expected - } - } - - @Test - public void testBothKeytabMechanismsConfigured() throws Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL, "test"); - compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH, - "/some/local/path"); - compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - try { - 
SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, - "testCluster"); - fail("expected SliderException"); - } catch (SliderException e) { - // expected - } - } - - @Test - public void testMissingPrincipalButLoginWithDistributedConfig() throws - Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster"); - } - - @Test - public void testMissingPrincipalButLoginWithLocalConfig() throws Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH, - "/some/local/path"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster"); - } - - @Test - public void testKeypathLocationOnceLocalized() throws Throwable { - Configuration config = new Configuration(); - config.set(CommonConfigurationKeysPublic - .HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - Map compOps = new HashMap<>(); - compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab"); - Application application = new Application().configuration(new org.apache - .slider.api.resource.Configuration().properties(compOps)); - - SecurityConfiguration securityConfiguration = - new SecurityConfiguration(config, application, "testCluster"); - - assertEquals(new File(SliderKeys.KEYTAB_DIR, "some.keytab") - .getAbsolutePath(), - securityConfiguration.getKeytabFile().getAbsolutePath()); - } -} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java index 6e6e98b..3298fc3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java @@ -20,7 +20,8 @@ import java.io.IOException; import java.net.InetSocketAddress; -import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -29,7 +30,6 @@ import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.source.JvmMetrics; -import org.apache.hadoop.security.AuthenticationFilterInitializer; import org.apache.hadoop.security.HttpCrossOriginFilterInitializer; import 
org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.service.CompositeService; @@ -49,10 +49,9 @@ import org.apache.hadoop.yarn.server.timeline.TimelineDataManager; import org.apache.hadoop.yarn.server.timeline.TimelineStore; import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager; -import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilter; -import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer; -import org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService; +import org.apache.hadoop.yarn.server.timeline.security.TimelineV1DelegationTokenSecretManagerService; import org.apache.hadoop.yarn.server.timeline.webapp.CrossOriginFilterInitializer; +import org.apache.hadoop.yarn.server.util.timeline.TimelineServerUtils; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApps; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; @@ -75,7 +74,7 @@ private ApplicationACLsManager aclsManager; private ApplicationHistoryManager historyManager; private TimelineStore timelineStore; - private TimelineDelegationTokenSecretManagerService secretManagerService; + private TimelineV1DelegationTokenSecretManagerService secretManagerService; private TimelineDataManager timelineDataManager; private WebApp webApp; private JvmPauseMonitor pauseMonitor; @@ -223,9 +222,9 @@ private TimelineStore createTimelineStore( TimelineStore.class), conf); } - private TimelineDelegationTokenSecretManagerService + private TimelineV1DelegationTokenSecretManagerService createTimelineDelegationTokenSecretManagerService(Configuration conf) { - return new TimelineDelegationTokenSecretManagerService(); + return new TimelineV1DelegationTokenSecretManagerService(); } private TimelineDataManager createTimelineDataManager(Configuration conf) { @@ -237,63 +236,35 @@ private TimelineDataManager createTimelineDataManager(Configuration conf) { @SuppressWarnings("unchecked") private void startWebApp() { Configuration conf = getConfig(); - TimelineAuthenticationFilter.setTimelineDelegationTokenSecretManager( - secretManagerService.getTimelineDelegationTokenSecretManager()); // Always load pseudo authentication filter to parse "user.name" in an URL // to identify a HTTP request's user in insecure mode. // When Kerberos authentication type is set (i.e., secure mode is turned on), // the customized filter will be loaded by the timeline server to do Kerberos // + DT authentication. String initializers = conf.get("hadoop.http.filter.initializers"); - boolean modifiedInitializers = false; - initializers = initializers == null || initializers.length() == 0 ? "" : initializers; - + Set defaultInitializers = new LinkedHashSet(); + // Add CORS filter if (!initializers.contains(CrossOriginFilterInitializer.class.getName())) { - if(conf.getBoolean(YarnConfiguration - .TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED, YarnConfiguration - .TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) { - if (initializers.contains(HttpCrossOriginFilterInitializer.class.getName())) { - initializers = - initializers.replaceAll(HttpCrossOriginFilterInitializer.class.getName(), + if(conf.getBoolean(YarnConfiguration. + TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED, + YarnConfiguration. 
+ TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT)) { + if (initializers.contains( + HttpCrossOriginFilterInitializer.class.getName())) { + initializers = initializers.replaceAll( + HttpCrossOriginFilterInitializer.class.getName(), CrossOriginFilterInitializer.class.getName()); + } else { + defaultInitializers.add(CrossOriginFilterInitializer.class.getName()); } - else { - if (initializers.length() != 0) { - initializers += ","; - } - initializers += CrossOriginFilterInitializer.class.getName(); - } - modifiedInitializers = true; - } - } - - if (!initializers.contains(TimelineAuthenticationFilterInitializer.class - .getName())) { - if (initializers.length() != 0) { - initializers += ","; - } - initializers += TimelineAuthenticationFilterInitializer.class.getName(); - modifiedInitializers = true; - } - - String[] parts = initializers.split(","); - ArrayList target = new ArrayList(); - for (String filterInitializer : parts) { - filterInitializer = filterInitializer.trim(); - if (filterInitializer.equals(AuthenticationFilterInitializer.class - .getName())) { - modifiedInitializers = true; - continue; } - target.add(filterInitializer); - } - String actualInitializers = - org.apache.commons.lang.StringUtils.join(target, ","); - if (modifiedInitializers) { - conf.set("hadoop.http.filter.initializers", actualInitializers); } + TimelineServerUtils.addTimelineAuthFilter( + initializers, defaultInitializers, secretManagerService); + TimelineServerUtils.setTimelineFilters( + conf, initializers, defaultInitializers); String bindAddress = WebAppUtils.getWebAppBindURL(conf, YarnConfiguration.TIMELINE_SERVICE_BIND_HOST, WebAppUtils.getAHSWebAppURLWithoutScheme(conf)); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java index ad8dc2c..f6d1863 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilter.java @@ -23,27 +23,33 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter; -import org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService.TimelineDelegationTokenSecretManager; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +/** + * Timeline authentication filter provides delegation token support for ATSv1 + * and ATSv2. 
+ */ @Private @Unstable public class TimelineAuthenticationFilter extends DelegationTokenAuthenticationFilter { - private static TimelineDelegationTokenSecretManager secretManager; + private static AbstractDelegationTokenSecretManager + secretManager; @Override public void init(FilterConfig filterConfig) throws ServletException { filterConfig.getServletContext().setAttribute( - DelegationTokenAuthenticationFilter.DELEGATION_TOKEN_SECRET_MANAGER_ATTR, - secretManager); + DelegationTokenAuthenticationFilter. + DELEGATION_TOKEN_SECRET_MANAGER_ATTR, secretManager); super.init(filterConfig); } public static void setTimelineDelegationTokenSecretManager( - TimelineDelegationTokenSecretManager secretManager) { - TimelineAuthenticationFilter.secretManager = secretManager; + AbstractDelegationTokenSecretManager + secretMgr) { + TimelineAuthenticationFilter.secretManager = secretMgr; } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java index 60a0348..433be36 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelegationTokenSecretManagerService.java @@ -28,7 +28,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; import org.apache.hadoop.security.token.delegation.DelegationKey; -import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; @@ -37,18 +36,16 @@ import org.apache.hadoop.yarn.server.timeline.recovery.TimelineStateStore.TimelineServiceState; /** - * The service wrapper of {@link TimelineDelegationTokenSecretManager} + * The service wrapper of {@link TimelineV1DelegationTokenSecretManager}. 
*/ @Private @Unstable -public class TimelineDelegationTokenSecretManagerService extends - AbstractService { - - private TimelineDelegationTokenSecretManager secretManager = null; +public class TimelineV1DelegationTokenSecretManagerService extends + TimelineDelgationTokenSecretManagerService { private TimelineStateStore stateStore = null; - public TimelineDelegationTokenSecretManagerService() { - super(TimelineDelegationTokenSecretManagerService.class.getName()); + public TimelineV1DelegationTokenSecretManagerService() { + super(TimelineV1DelegationTokenSecretManagerService.class.getName()); } @Override @@ -58,19 +55,7 @@ protected void serviceInit(Configuration conf) throws Exception { stateStore = createStateStore(conf); stateStore.init(conf); } - - long secretKeyInterval = - conf.getLong(YarnConfiguration.TIMELINE_DELEGATION_KEY_UPDATE_INTERVAL, - YarnConfiguration.DEFAULT_TIMELINE_DELEGATION_KEY_UPDATE_INTERVAL); - long tokenMaxLifetime = - conf.getLong(YarnConfiguration.TIMELINE_DELEGATION_TOKEN_MAX_LIFETIME, - YarnConfiguration.DEFAULT_TIMELINE_DELEGATION_TOKEN_MAX_LIFETIME); - long tokenRenewInterval = - conf.getLong(YarnConfiguration.TIMELINE_DELEGATION_TOKEN_RENEW_INTERVAL, - YarnConfiguration.DEFAULT_TIMELINE_DELEGATION_TOKEN_RENEW_INTERVAL); - secretManager = new TimelineDelegationTokenSecretManager(secretKeyInterval, - tokenMaxLifetime, tokenRenewInterval, 3600000, stateStore); - super.init(conf); + super.serviceInit(conf); } @Override @@ -78,10 +63,9 @@ protected void serviceStart() throws Exception { if (stateStore != null) { stateStore.start(); TimelineServiceState state = stateStore.loadState(); - secretManager.recover(state); + ((TimelineV1DelegationTokenSecretManager) + getTimelineDelegationTokenSecretManager()).recover(state); } - - secretManager.startThreads(); super.serviceStart(); } @@ -90,9 +74,18 @@ protected void serviceStop() throws Exception { if (stateStore != null) { stateStore.stop(); } + super.serviceStop(); + } - secretManager.stopThreads(); - super.stop(); + @Override + protected AbstractDelegationTokenSecretManager + + createTimelineDelegationTokenSecretManager(long secretKeyInterval, + long tokenMaxLifetime, long tokenRenewInterval, + long tokenRemovalScanInterval) { + return new TimelineV1DelegationTokenSecretManager(secretKeyInterval, + tokenMaxLifetime, tokenRenewInterval, tokenRemovalScanInterval, + stateStore); } protected TimelineStateStore createStateStore( @@ -104,27 +97,20 @@ protected TimelineStateStore createStateStore( } /** - * Ge the instance of {link #TimelineDelegationTokenSecretManager} - * - * @return the instance of {link #TimelineDelegationTokenSecretManager} + * Delegation token secret manager for ATSv1 and ATSv1.5. */ - public TimelineDelegationTokenSecretManager - getTimelineDelegationTokenSecretManager() { - return secretManager; - } - @Private @Unstable - public static class TimelineDelegationTokenSecretManager extends + public static class TimelineV1DelegationTokenSecretManager extends AbstractDelegationTokenSecretManager { public static final Log LOG = - LogFactory.getLog(TimelineDelegationTokenSecretManager.class); + LogFactory.getLog(TimelineV1DelegationTokenSecretManager.class); private TimelineStateStore stateStore; /** - * Create a timeline secret manager + * Create a timeline v1 secret manager. * @param delegationKeyUpdateInterval the number of milliseconds for rolling * new secret keys. 
* @param delegationTokenMaxLifetime the maximum lifetime of the delegation @@ -135,7 +121,7 @@ protected TimelineStateStore createStateStore( * scanned for expired tokens in milliseconds * @param stateStore timeline service state store */ - public TimelineDelegationTokenSecretManager( + public TimelineV1DelegationTokenSecretManager( long delegationKeyUpdateInterval, long delegationTokenMaxLifetime, long delegationTokenRenewInterval, @@ -236,5 +222,4 @@ public void recover(TimelineServiceState state) throws IOException { } } } - } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineAuthenticationFilter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineAuthenticationFilter.java index 063f512..d918e8d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineAuthenticationFilter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/security/TestTimelineAuthenticationFilter.java @@ -55,27 +55,31 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +/** + * Test cases for authentication via TimelineAuthenticationFilter while + * publishing entities for ATSv1. + */ @RunWith(Parameterized.class) -public class TestTimelineAuthenticationFilter { +public class TestTimelineAuthenticationFilterForV1 { private static final String FOO_USER = "foo"; private static final String BAR_USER = "bar"; private static final String HTTP_USER = "HTTP"; - private static final File testRootDir = new File( + private static final File TEST_ROOT_DIR = new File( System.getProperty("test.build.dir", "target/test-dir"), - TestTimelineAuthenticationFilter.class.getName() + "-root"); + TestTimelineAuthenticationFilterForV1.class.getName() + "-root"); private static File httpSpnegoKeytabFile = new File( KerberosTestUtils.getKeytabFile()); private static String httpSpnegoPrincipal = KerberosTestUtils.getServerPrincipal(); private static final String BASEDIR = System.getProperty("test.build.dir", "target/test-dir") + "/" - + TestTimelineAuthenticationFilter.class.getSimpleName(); + + TestTimelineAuthenticationFilterForV1.class.getSimpleName(); @Parameterized.Parameters public static Collection withSsl() { - return Arrays.asList(new Object[][] { { false }, { true } }); + return Arrays.asList(new Object[][] {{false}, {true}}); } private static MiniKdc testMiniKDC; @@ -85,14 +89,14 @@ private static Configuration conf; private static boolean withSsl; - public TestTimelineAuthenticationFilter(boolean withSsl) { - TestTimelineAuthenticationFilter.withSsl = withSsl; + public TestTimelineAuthenticationFilterForV1(boolean withSsl) { + TestTimelineAuthenticationFilterForV1.withSsl = withSsl; } @BeforeClass public static void setup() { try { - testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir); + testMiniKDC = new MiniKdc(MiniKdc.createConf(), TEST_ROOT_DIR); testMiniKDC.start(); testMiniKDC.createPrincipal( httpSpnegoKeytabFile, HTTP_USER + "/localhost"); @@ -111,11 +115,11 @@ public static void setup() { KerberosAuthenticationHandler.KEYTAB, httpSpnegoKeytabFile.getAbsolutePath()); 
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, - "kerberos"); + "kerberos"); conf.set(YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL, - httpSpnegoPrincipal); + httpSpnegoPrincipal); conf.set(YarnConfiguration.TIMELINE_SERVICE_KEYTAB, - httpSpnegoKeytabFile.getAbsolutePath()); + httpSpnegoKeytabFile.getAbsolutePath()); conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true); conf.setClass(YarnConfiguration.TIMELINE_SERVICE_STORE, MemoryTimelineStore.class, TimelineStore.class); @@ -136,8 +140,8 @@ public static void setup() { FileUtil.fullyDelete(base); base.mkdirs(); keystoresDir = new File(BASEDIR).getAbsolutePath(); - sslConfDir = - KeyStoreTestUtil.getClasspathDir(TestTimelineAuthenticationFilter.class); + sslConfDir = KeyStoreTestUtil.getClasspathDir( + TestTimelineAuthenticationFilterForV1.class); KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false); } @@ -145,6 +149,7 @@ public static void setup() { testTimelineServer.init(conf); testTimelineServer.start(); } catch (Exception e) { + e.printStackTrace(); assertTrue("Couldn't setup TimelineServer", false); } } @@ -181,14 +186,14 @@ public Void call() throws Exception { TimelineClient client = createTimelineClientForUGI(); TimelineEntity entityToStore = new TimelineEntity(); entityToStore.setEntityType( - TestTimelineAuthenticationFilter.class.getName()); + TestTimelineAuthenticationFilterForV1.class.getName()); entityToStore.setEntityId("entity1"); entityToStore.setStartTime(0L); TimelinePutResponse putResponse = client.putEntities(entityToStore); Assert.assertEquals(0, putResponse.getErrors().size()); TimelineEntity entityToRead = - testTimelineServer.getTimelineStore().getEntity( - "entity1", TestTimelineAuthenticationFilter.class.getName(), null); + testTimelineServer.getTimelineStore().getEntity("entity1", + TestTimelineAuthenticationFilterForV1.class.getName(), null); Assert.assertNotNull(entityToRead); return null; } @@ -202,13 +207,14 @@ public void testPutDomains() throws Exception { public Void call() throws Exception { TimelineClient client = createTimelineClientForUGI(); TimelineDomain domainToStore = new TimelineDomain(); - domainToStore.setId(TestTimelineAuthenticationFilter.class.getName()); + domainToStore.setId( + TestTimelineAuthenticationFilterForV1.class.getName()); domainToStore.setReaders("*"); domainToStore.setWriters("*"); client.putDomain(domainToStore); TimelineDomain domainToRead = testTimelineServer.getTimelineStore().getDomain( - TestTimelineAuthenticationFilter.class.getName()); + TestTimelineAuthenticationFilterForV1.class.getName()); Assert.assertNotNull(domainToRead); return null; } @@ -218,22 +224,24 @@ public Void call() throws Exception { @Test public void testDelegationTokenOperations() throws Exception { TimelineClient httpUserClient = - KerberosTestUtils.doAs(HTTP_USER + "/localhost", new Callable() { - @Override - public TimelineClient call() throws Exception { - return createTimelineClientForUGI(); - } - }); + KerberosTestUtils.doAs(HTTP_USER + "/localhost", + new Callable() { + @Override + public TimelineClient call() throws Exception { + return createTimelineClientForUGI(); + } + }); UserGroupInformation httpUser = - KerberosTestUtils.doAs(HTTP_USER + "/localhost", new Callable() { - @Override - public UserGroupInformation call() throws Exception { - return UserGroupInformation.getCurrentUser(); - } - }); + KerberosTestUtils.doAs(HTTP_USER + "/localhost", + new Callable() { + @Override + public UserGroupInformation call() throws Exception { + 
return UserGroupInformation.getCurrentUser(); + } + }); // Let HTTP user to get the delegation for itself Token token = - httpUserClient.getDelegationToken(httpUser.getShortUserName()); + httpUserClient.getDelegationToken(httpUser.getShortUserName()); Assert.assertNotNull(token); TimelineDelegationTokenIdentifier tDT = token.decodeIdentifier(); Assert.assertNotNull(tDT); @@ -317,7 +325,8 @@ public TimelineClient run() { barUserClient.getDelegationToken(httpUser.getShortUserName()); Assert.fail(); } catch (Exception e) { - Assert.assertTrue(e.getCause() instanceof AuthorizationException || e.getCause() instanceof AuthenticationException); + Assert.assertTrue(e.getCause() instanceof AuthorizationException || + e.getCause() instanceof AuthenticationException); } } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelgationTokenSecretManagerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelgationTokenSecretManagerService.java new file mode 100644 index 0000000..2e95af2 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineDelgationTokenSecretManagerService.java @@ -0,0 +1,83 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.timeline.security; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; + +/** + * Abstract implementation of delegation token manager service for different + * versions of timeline service. 
+ */ +public abstract class TimelineDelgationTokenSecretManagerService extends + AbstractService { + + public TimelineDelgationTokenSecretManagerService(String name) { + super(name); + } + + private static long delegationTokenRemovalScanInterval = 3600000L; + + private AbstractDelegationTokenSecretManager + secretManager = null; + + @Override + protected void serviceInit(Configuration conf) throws Exception { + long secretKeyInterval = + conf.getLong(YarnConfiguration.TIMELINE_DELEGATION_KEY_UPDATE_INTERVAL, + YarnConfiguration.DEFAULT_TIMELINE_DELEGATION_KEY_UPDATE_INTERVAL); + long tokenMaxLifetime = + conf.getLong(YarnConfiguration.TIMELINE_DELEGATION_TOKEN_MAX_LIFETIME, + YarnConfiguration.DEFAULT_TIMELINE_DELEGATION_TOKEN_MAX_LIFETIME); + long tokenRenewInterval = + conf.getLong(YarnConfiguration.TIMELINE_DELEGATION_TOKEN_RENEW_INTERVAL, + YarnConfiguration.DEFAULT_TIMELINE_DELEGATION_TOKEN_RENEW_INTERVAL); + secretManager = createTimelineDelegationTokenSecretManager( + secretKeyInterval, tokenMaxLifetime, tokenRenewInterval, + delegationTokenRemovalScanInterval); + super.init(conf); + } + + protected abstract + AbstractDelegationTokenSecretManager + createTimelineDelegationTokenSecretManager(long secretKeyInterval, + long tokenMaxLifetime, long tokenRenewInterval, + long tokenRemovalScanInterval); + + @Override + protected void serviceStart() throws Exception { + secretManager.startThreads(); + super.serviceStart(); + } + + @Override + protected void serviceStop() throws Exception { + secretManager.stopThreads(); + super.stop(); + } + + public AbstractDelegationTokenSecretManager + + getTimelineDelegationTokenSecretManager() { + return secretManager; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/package-info.java new file mode 100644 index 0000000..14a52e3 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/package-info.java @@ -0,0 +1,26 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.server.timeline.security contains classes related + * to timeline authentication filters and abstract delegation token service for + * ATSv1 and ATSv2. 
+ */ +@InterfaceAudience.Private +package org.apache.hadoop.yarn.server.timeline.security; +import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java new file mode 100644 index 0000000..5beb6dc --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java @@ -0,0 +1,86 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.util.timeline; + +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.AuthenticationFilterInitializer; +import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilter; +import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer; +import org.apache.hadoop.yarn.server.timeline.security.TimelineDelgationTokenSecretManagerService; + +/** + * Set of utility methods to be used across timeline reader and collector. + */ +public final class TimelineServerUtils { + private TimelineServerUtils() { + } + + /** + * Sets filter initializers configuration based on existing configuration and + * default filters added by timeline service(such as timeline auth filter and + * CORS filter). + * @param conf Configuration object. + * @param configuredInitializers Comma separated list of filter initializers. + * @param defaultInitializers Set of initializers added by default by timeline + * service. + */ + public static void setTimelineFilters(Configuration conf, + String configuredInitializers, Set defaultInitializers) { + String[] parts = configuredInitializers.split(","); + Set target = new LinkedHashSet(); + for (String filterInitializer : parts) { + filterInitializer = filterInitializer.trim(); + if (filterInitializer.equals( + AuthenticationFilterInitializer.class.getName()) || + filterInitializer.isEmpty()) { + continue; + } + target.add(filterInitializer); + } + target.addAll(defaultInitializers); + String actualInitializers = + org.apache.commons.lang.StringUtils.join(target, ","); + conf.set("hadoop.http.filter.initializers", actualInitializers); + } + + /** + * Adds timeline authentication filter to the set of default filter + * initializers and assigns the delegation token manager service to it. + * @param initializers Comma separated list of filter initializers. 
+ * @param defaultInitializers Set of initializers added by default by timeline + * service. + * @param delegationTokenMgrService Delegation token manager service. + * This will be used by timeline authentication filter to assign + * delegation tokens. + */ + public static void addTimelineAuthFilter(String initializers, + Set defaultInitializers, + TimelineDelgationTokenSecretManagerService delegationTokenMgrService) { + TimelineAuthenticationFilter.setTimelineDelegationTokenSecretManager( + delegationTokenMgrService.getTimelineDelegationTokenSecretManager()); + if (!initializers.contains( + TimelineAuthenticationFilterInitializer.class.getName())) { + defaultInitializers.add( + TimelineAuthenticationFilterInitializer.class.getName()); + } + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/package-info.java new file mode 100644 index 0000000..75c6973 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/package-info.java @@ -0,0 +1,25 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * Package org.apache.hadoop.server.util.timeline contains utility classes used + * by ATSv1 and ATSv2 on the server side. 
+ */ +@InterfaceAudience.Private +package org.apache.hadoop.yarn.server.util.timeline; +import org.apache.hadoop.classification.InterfaceAudience; \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java index 0323d7b..47c5455 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/NodeTimelineCollectorManager.java @@ -18,13 +18,11 @@ package org.apache.hadoop.yarn.server.timelineservice.collector; -import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER; -import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER; - import java.io.IOException; import java.net.InetSocketAddress; import java.net.URI; -import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -32,7 +30,9 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.HttpServer2; -import org.apache.hadoop.http.lib.StaticUserWebFilter; +import org.apache.hadoop.net.NetUtils; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; @@ -42,6 +42,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.ReportNewCollectorInfoRequest; +import org.apache.hadoop.yarn.server.timelineservice.security.TimelineV2DelegationTokenSecretManagerService; +import org.apache.hadoop.yarn.server.util.timeline.TimelineServerUtils; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; @@ -65,17 +67,51 @@ private volatile CollectorNodemanagerProtocol nmCollectorService; + private TimelineV2DelegationTokenSecretManagerService tokenMgrService; + + private final boolean runningAsAuxService; + static final String COLLECTOR_MANAGER_ATTR_KEY = "collector.manager"; @VisibleForTesting protected NodeTimelineCollectorManager() { + this(true); + } + + protected NodeTimelineCollectorManager(boolean asAuxService) { super(NodeTimelineCollectorManager.class.getName()); + this.runningAsAuxService = asAuxService; + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + tokenMgrService = new TimelineV2DelegationTokenSecretManagerService(); + addService(tokenMgrService); + super.serviceInit(conf); } @Override protected void serviceStart() throws Exception { - startWebApp(); + if 
(UserGroupInformation.isSecurityEnabled() && !runningAsAuxService) { + // Do security login for cases where collector is running outside NM. + try { + doSecureLogin(); + } catch(IOException ie) { + throw new YarnRuntimeException("Failed to login", ie); + } + } super.serviceStart(); + startWebApp(); + } + + private void doSecureLogin() throws IOException { + Configuration conf = getConfig(); + InetSocketAddress addr = NetUtils.createSocketAddr(conf.getTrimmed( + YarnConfiguration.TIMELINE_SERVICE_BIND_HOST, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_BIND_HOST), 0, + YarnConfiguration.TIMELINE_SERVICE_BIND_HOST); + SecurityUtil.login(conf, YarnConfiguration.TIMELINE_SERVICE_KEYTAB, + YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL, addr.getHostName()); } @Override @@ -105,6 +141,14 @@ protected void doPostPut(ApplicationId appId, TimelineCollector collector) { */ private void startWebApp() { Configuration conf = getConfig(); + String initializers = conf.get("hadoop.http.filter.initializers"); + Set defaultInitializers = new LinkedHashSet(); + initializers = + initializers == null || initializers.length() == 0 ? "" : initializers; + TimelineServerUtils.addTimelineAuthFilter( + initializers, defaultInitializers, tokenMgrService); + TimelineServerUtils.setTimelineFilters( + conf, initializers, defaultInitializers); String bindAddress = conf.get(YarnConfiguration.TIMELINE_SERVICE_BIND_HOST, YarnConfiguration.DEFAULT_TIMELINE_SERVICE_BIND_HOST) + ":0"; try { @@ -114,16 +158,10 @@ private void startWebApp() { .addEndpoint(URI.create( (YarnConfiguration.useHttps(conf) ? "https://" : "http://") + bindAddress)); + if (YarnConfiguration.useHttps(conf)) { + builder = WebAppUtils.loadSslConfiguration(builder, conf); + } timelineRestServer = builder.build(); - // TODO: replace this by an authentication filter in future. 
- HashMap options = new HashMap<>(); - String username = conf.get(HADOOP_HTTP_STATIC_USER, - DEFAULT_HADOOP_HTTP_STATIC_USER); - options.put(HADOOP_HTTP_STATIC_USER, username); - HttpServer2.defineFilter(timelineRestServer.getWebAppContext(), - "static_user_filter_timeline", - StaticUserWebFilter.StaticUserFilter.class.getName(), - options, new String[] {"/*"}); timelineRestServer.addJerseyResourcePackage( TimelineCollectorWebService.class.getPackage().getName() + ";" diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java index 266bd04..aeb18c0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java @@ -61,7 +61,7 @@ private ScheduledExecutorService scheduler; public PerNodeTimelineCollectorsAuxService() { - this(new NodeTimelineCollectorManager()); + this(new NodeTimelineCollectorManager(true)); } @VisibleForTesting PerNodeTimelineCollectorsAuxService( @@ -202,7 +202,8 @@ public ByteBuffer getMetaData() { PerNodeTimelineCollectorsAuxService auxService = null; try { auxService = collectorManager == null ? - new PerNodeTimelineCollectorsAuxService() : + new PerNodeTimelineCollectorsAuxService( + new NodeTimelineCollectorManager(false)) : new PerNodeTimelineCollectorsAuxService(collectorManager); ShutdownHookManager.get().addShutdownHook(new ShutdownHook(auxService), SHUTDOWN_HOOK_PRIORITY); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java index 07cbb2b..cc23cc9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/TimelineCollectorManager.java @@ -31,7 +31,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.service.CompositeService; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -47,7 +47,7 @@ */ @InterfaceAudience.Private @InterfaceStability.Unstable -public class TimelineCollectorManager extends AbstractService { +public class TimelineCollectorManager extends CompositeService { private static final Log LOG = 
       LogFactory.getLog(TimelineCollectorManager.class);
@@ -57,7 +57,7 @@
   private boolean writerFlusherRunning;
 
   @Override
-  public void serviceInit(Configuration conf) throws Exception {
+  protected void serviceInit(Configuration conf) throws Exception {
     writer = createTimelineWriter(conf);
     writer.init(conf);
     // create a single dedicated thread for flushing the writer on a periodic
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/security/TimelineV2DelegationTokenSecretManagerService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/security/TimelineV2DelegationTokenSecretManagerService.java
new file mode 100644
index 0000000..eef8436
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/security/TimelineV2DelegationTokenSecretManagerService.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.security;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
+import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineDelgationTokenSecretManagerService;
+
+/**
+ * The service wrapper of {@link TimelineV2DelegationTokenSecretManager}.
+ */
+public class TimelineV2DelegationTokenSecretManagerService extends
+    TimelineDelgationTokenSecretManagerService {
+  public TimelineV2DelegationTokenSecretManagerService() {
+    super(TimelineV2DelegationTokenSecretManagerService.class.getName());
+  }
+
+  @Override
+  protected AbstractDelegationTokenSecretManager
+      <TimelineDelegationTokenIdentifier>
+      createTimelineDelegationTokenSecretManager(long secretKeyInterval,
+      long tokenMaxLifetime, long tokenRenewInterval,
+      long tokenRemovalScanInterval) {
+    return new TimelineV2DelegationTokenSecretManager(secretKeyInterval,
+        tokenMaxLifetime, tokenRenewInterval, tokenRemovalScanInterval);
+  }
+
+  /**
+   * Delegation token secret manager for ATSv2.
+   */
+  @Private
+  @Unstable
+  public static class TimelineV2DelegationTokenSecretManager extends
+      AbstractDelegationTokenSecretManager<TimelineDelegationTokenIdentifier> {
+
+    /**
+     * Create a timeline v2 secret manager.
+     * @param delegationKeyUpdateInterval the number of milliseconds for rolling
+     *          new secret keys.
+     * @param delegationTokenMaxLifetime the maximum lifetime of the delegation
+     *          tokens in milliseconds
+     * @param delegationTokenRenewInterval how often the tokens must be renewed
+     *          in milliseconds
+     * @param delegationTokenRemoverScanInterval how often the tokens are
+     *          scanned for expired tokens in milliseconds
+     */
+    public TimelineV2DelegationTokenSecretManager(
+        long delegationKeyUpdateInterval, long delegationTokenMaxLifetime,
+        long delegationTokenRenewInterval,
+        long delegationTokenRemoverScanInterval) {
+      super(delegationKeyUpdateInterval, delegationTokenMaxLifetime,
+          delegationTokenRenewInterval, delegationTokenRemoverScanInterval);
+    }
+
+    @Override
+    public TimelineDelegationTokenIdentifier createIdentifier() {
+      return new TimelineDelegationTokenIdentifier();
+    }
+  }
+}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/security/package-info.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/security/package-info.java
new file mode 100644
index 0000000..8250092
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/security/package-info.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Package org.apache.hadoop.server.timelineservice.security contains classes
+ * to be used to generate delegation tokens for ATSv2.
+ */
+@InterfaceAudience.Private
+package org.apache.hadoop.yarn.server.timelineservice.security;
+import org.apache.hadoop.classification.InterfaceAudience;
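
Note: the keytab-login sequence this patch adds to NodeTimelineCollectorManager.doSecureLogin() can be exercised on its own roughly as below. This is a minimal sketch and not part of the patch: the class name TimelineCollectorLoginSketch and the principal/keytab values are illustrative placeholders, and it assumes a kerberized deployment (hadoop.security.authentication set to kerberos); without that, SecurityUtil.login is effectively a no-op.

import java.io.IOException;
import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

/** Standalone sketch of the collector's secure login path (illustrative only). */
public class TimelineCollectorLoginSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new YarnConfiguration();
    // Placeholder values; a real cluster sets yarn.timeline-service.principal
    // and yarn.timeline-service.keytab in yarn-site.xml.
    conf.set(YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL,
        "yarn/_HOST@EXAMPLE.COM");
    conf.set(YarnConfiguration.TIMELINE_SERVICE_KEYTAB,
        "/etc/security/keytabs/yarn.service.keytab");
    // Resolve the bind host the same way doSecureLogin() does.
    InetSocketAddress addr = NetUtils.createSocketAddr(conf.getTrimmed(
        YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
        YarnConfiguration.DEFAULT_TIMELINE_SERVICE_BIND_HOST), 0,
        YarnConfiguration.TIMELINE_SERVICE_BIND_HOST);
    // SecurityUtil.login substitutes _HOST in the principal with the resolved
    // host name and performs the UserGroupInformation keytab login.
    SecurityUtil.login(conf, YarnConfiguration.TIMELINE_SERVICE_KEYTAB,
        YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL, addr.getHostName());
  }
}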