diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineDelegationTokenResponse.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineDelegationTokenResponse.java new file mode 100644 index 0000000..90d13a2 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineDelegationTokenResponse.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.api.records.timeline; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; + +/** + * The response of delegation token related request + */ +@XmlRootElement(name = "delegationtoken") +@XmlAccessorType(XmlAccessType.NONE) +@Public +@Unstable +public class TimelineDelegationTokenResponse { + + private String type; + private Object content; + + public TimelineDelegationTokenResponse() { + + } + + @XmlElement(name = "type") + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + @XmlElement(name = "content") + public Object getContent() { + return content; + } + + public void setContent(Object content) { + this.content = content; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java index 37c0046..c326260 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelinePutResponse.java @@ -107,6 +107,17 @@ public void setErrors(List errors) { */ public static final int IO_EXCEPTION = 2; + /** + * Error code returned if the user specifies the timeline system reserved + * filter key + */ + public static final int SYSTEM_FILTER_CONFLICT = 3; + + /** + * Error code returned if the user is denied to access the timeline data + */ + public static final int ACCESS_DENY = 4; + private 
String entityId; private String entityType; private int errorCode; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java index fe3c1e1..2d9dcb1 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java @@ -1136,14 +1136,6 @@ public static final String DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS = "0.0.0.0:" + DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_PORT; - /**The kerberos principal to be used for spnego filter for timeline service.*/ - public static final String TIMELINE_SERVICE_WEBAPP_SPNEGO_USER_NAME_KEY = - TIMELINE_SERVICE_PREFIX + "webapp.spnego-principal"; - - /**The kerberos keytab to be used for spnego filter for timeline service.*/ - public static final String TIMELINE_SERVICE_WEBAPP_SPNEGO_KEYTAB_FILE_KEY = - TIMELINE_SERVICE_PREFIX + "webapp.spnego-keytab-file"; - /** Timeline service store class */ public static final String TIMELINE_SERVICE_STORE = TIMELINE_SERVICE_PREFIX + "store-class"; @@ -1196,6 +1188,14 @@ public static final long DEFAULT_TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS = 1000 * 60 * 5; + /** The keytab for the timeline server.*/ + public static final String TIMELINE_SERVICE_KEYTAB = + TIMELINE_SERVICE_PREFIX + "keytab"; + + /** The Kerberos principal for the timeline server.*/ + public static final String TIMELINE_SERVICE_PRINCIPAL = + TIMELINE_SERVICE_PREFIX + "principal"; + //////////////////////////////// // Other Configs //////////////////////////////// diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java index fda0a4f..e2e85e1 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/TimelineClient.java @@ -18,16 +18,24 @@ package org.apache.hadoop.yarn.client.api; +import java.io.File; import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl; +import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.codehaus.jackson.map.ObjectMapper; /** * A client library that can be used to post some information in terms of a @@ -67,4 +75,78 @@ protected TimelineClient(String name) { public abstract TimelinePutResponse putEntities( TimelineEntity... entities) throws IOException, YarnException; + /** + *
<p>
+ * Get a delegation token so as to be able to talk to the timeline server in a + * secure way. + *
</p>
+ * + * @param renewer + * Address of the renewer who can renew these tokens when needed by + * securely talking to the timeline server + * @return a delegation token ({@link Token}) that can be used to talk to the + * timeline server + * @throws IOException + * @throws YarnException + */ + @Public + public abstract Token getDelegationToken( + String renewer) throws IOException, YarnException; + + /** + * Submit timeline data in a JSON file via command line. + * + * @param argv path to the {@link TimelineEntities} JSON file + */ + public static void main(String[] argv) { + if (argv.length != 1) { + System.err.println("Usage: "); + System.exit(-1); + } + File jsonFile = new File(argv[0]); + if (!jsonFile.exists()) { + System.out.println("Error: File [" + jsonFile.getAbsolutePath() + + "] doesn't exist"); + return; + } + ObjectMapper mapper = new ObjectMapper(); + YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + TimelineEntities entities = null; + try { + entities = mapper.readValue(jsonFile, TimelineEntities.class); + } catch (Exception e) { + System.err.println("Error: " + e.getMessage()); + e.printStackTrace(System.err); + return; + } + TimelineClient client = TimelineClient.createTimelineClient(); + client.init(new YarnConfiguration()); + client.start(); + try { + if (UserGroupInformation.isSecurityEnabled()) { + Token token = + client.getDelegationToken( + UserGroupInformation.getCurrentUser().getUserName()); + UserGroupInformation.getCurrentUser().addToken(token); + } + TimelinePutResponse response = client.putEntities( + entities.getEntities().toArray( + new TimelineEntity[entities.getEntities().size()])); + if (response.getErrors().size() == 0) { + System.out.println("Timeline data is successfully put"); + } else { + for (TimelinePutResponse.TimelinePutError error : response.getErrors()) { + System.out.println("TimelineEntity [" + error.getEntityType() + ":" + + error.getEntityId() + "] is not successfully put. 
Error code: " + + error.getErrorCode()); + } + } + } catch (Exception e) { + System.err.println("Error: " + e.getMessage()); + e.printStackTrace(System.err); + } finally { + client.stop(); + } + } + } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java index 64cc041..04d2bd9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java @@ -19,8 +19,13 @@ package org.apache.hadoop.yarn.client.api.impl; import java.io.IOException; +import java.net.HttpURLConnection; import java.net.URI; +import java.net.URL; import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; import javax.ws.rs.core.MediaType; @@ -29,12 +34,20 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticatedURL; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; +import 
org.apache.hadoop.yarn.exceptions.YarnRuntimeException; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +import org.apache.hadoop.yarn.security.client.TimelineKerberosAuthenticator; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; import com.google.common.annotations.VisibleForTesting; @@ -44,6 +57,8 @@ import com.sun.jersey.api.client.WebResource; import com.sun.jersey.api.client.config.ClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig; +import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory; +import com.sun.jersey.client.urlconnection.URLConnectionClientHandler; @Private @Unstable @@ -56,12 +71,18 @@ private Client client; private URI resURI; private boolean isEnabled; + private TimelineAuthenticatedURLConnectionFactory urlFactory; public TimelineClientImpl() { super(TimelineClientImpl.class.getName()); ClientConfig cc = new DefaultClientConfig(); cc.getClasses().add(YarnJacksonJaxbJsonProvider.class); - client = Client.create(cc); + if (UserGroupInformation.isSecurityEnabled()) { + urlFactory = new TimelineAuthenticatedURLConnectionFactory(); + client = new Client(new URLConnectionClientHandler(urlFactory), cc); + } else { + client = Client.create(cc); + } } protected void serviceInit(Configuration conf) throws Exception { @@ -124,6 +145,13 @@ public TimelinePutResponse putEntities( return resp.getEntity(TimelinePutResponse.class); } + @Override + public Token getDelegationToken( + String renewer) throws IOException, YarnException { + return TimelineKerberosAuthenticator.getDelegationToken(resURI.toURL(), + urlFactory.token, renewer); + } + @Private @VisibleForTesting public ClientResponse doPostingEntities(TimelineEntities entities) { @@ -133,4 +161,70 @@ public ClientResponse doPostingEntities(TimelineEntities entities) { .post(ClientResponse.class, entities); } + private static class TimelineAuthenticatedURLConnectionFactory + implements HttpURLConnectionFactory { + + 
private AuthenticatedURL.Token token; + private TimelineKerberosAuthenticator authenticator; + private Token dToken; + + public TimelineAuthenticatedURLConnectionFactory() { + token = new AuthenticatedURL.Token(); + authenticator = new TimelineKerberosAuthenticator(); + } + + @Override + public HttpURLConnection getHttpURLConnection(URL url) throws IOException { + try { + if (dToken == null) { + //TODO: need to take care of the renew case + dToken = selectToken(); + if (LOG.isDebugEnabled()) { + LOG.debug("Timeline delegation token: " + dToken.toString()); + } + } + if (dToken != null) { + Map params = new HashMap(); + TimelineKerberosAuthenticator.injectDelegationToken(params, dToken); + url = TimelineKerberosAuthenticator.appendParams(url, params); + if (LOG.isDebugEnabled()) { + LOG.debug("URL with delegation token: " + url); + } + } + return new AuthenticatedURL(authenticator).openConnection(url, token); + } catch (AuthenticationException e) { + LOG.error("Authentication failed when openning connection [" + url + + "] with token [" + token + "].", e); + throw new IOException(e); + } + } + + @SuppressWarnings("unchecked") + private Token selectToken() { + UserGroupInformation ugi; + try { + ugi = UserGroupInformation.getCurrentUser(); + } catch (IOException e) { + String msg = "Error when getting the current user"; + LOG.error(msg, e); + throw new YarnRuntimeException(msg, e); + } + Collection> tokens = ugi.getTokens(); + for (Token token : tokens) { + TokenIdentifier identifier = null; + try { + identifier = token.decodeIdentifier(); + } catch (IOException e) { + String msg = "Error when getting the token identifier"; + LOG.error(msg, e); + throw new YarnRuntimeException(msg, e); + } + if (identifier instanceof TimelineDelegationTokenIdentifier) { + return (Token) token; + } + } + return null; + } + } + } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java index 8a0348b..700f79a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/impl/YarnClientImpl.java @@ -19,6 +19,8 @@ package org.apache.hadoop.yarn.client.api.impl; import java.io.IOException; +import java.net.InetSocketAddress; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; @@ -29,8 +31,14 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.DataInputByteBuffer; +import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.RPC; +import org.apache.hadoop.security.Credentials; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.yarn.api.ApplicationClientProtocol; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse; @@ -64,6 +72,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerReport; import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.NodeState; @@ -74,6 +83,7 @@ import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import 
org.apache.hadoop.yarn.client.ClientRMProxy; import org.apache.hadoop.yarn.client.api.AHSClient; +import org.apache.hadoop.yarn.client.api.TimelineClient; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.YarnClientApplication; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -82,6 +92,7 @@ import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.Records; @@ -97,8 +108,11 @@ protected long submitPollIntervalMillis; private long asyncApiPollIntervalMillis; private long asyncApiPollTimeoutMillis; - protected AHSClient historyClient; + private AHSClient historyClient; private boolean historyServiceEnabled; + private TimelineClient timelineClient; + private Text timelineService; + private boolean timelineServiceEnabled; private static final String ROOT = "root"; @@ -126,10 +140,30 @@ protected void serviceInit(Configuration conf) throws Exception { if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED, YarnConfiguration.DEFAULT_APPLICATION_HISTORY_ENABLED)) { historyServiceEnabled = true; - historyClient = AHSClientImpl.createAHSClient(); - historyClient.init(getConfig()); + historyClient = AHSClient.createAHSClient(); + historyClient.init(conf); } + if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ENABLED)) { + timelineServiceEnabled = true; + timelineClient = TimelineClient.createTimelineClient(); + timelineClient.init(conf); + + InetSocketAddress timelineServiceAddr = null; + if (YarnConfiguration.useHttps(getConfig())) { + timelineServiceAddr = getConfig().getSocketAddr( + YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS, + 
YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_PORT); + } else { + timelineServiceAddr = getConfig().getSocketAddr( + YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_PORT); + } + timelineService = SecurityUtil.buildTokenService(timelineServiceAddr); + } super.serviceInit(conf); } @@ -141,6 +175,9 @@ protected void serviceStart() throws Exception { if (historyServiceEnabled) { historyClient.start(); } + if (timelineServiceEnabled) { + timelineClient.start(); + } } catch (IOException e) { throw new YarnRuntimeException(e); } @@ -155,6 +192,9 @@ protected void serviceStop() throws Exception { if (historyServiceEnabled) { historyClient.stop(); } + if (timelineServiceEnabled) { + timelineClient.stop(); + } super.serviceStop(); } @@ -189,6 +229,12 @@ public YarnClientApplication createApplication() Records.newRecord(SubmitApplicationRequest.class); request.setApplicationSubmissionContext(appContext); + // Automatically add the timeline DT into the CLC + // Only when the security and the timeline service are both enabled + if (UserGroupInformation.isSecurityEnabled() && timelineServiceEnabled) { + addTimelineDelegationToken(appContext.getAMContainerSpec()); + } + //TODO: YARN-1763:Handle RM failovers during the submitApplication call. 
rmClient.submitApplication(request); @@ -238,6 +284,42 @@ public YarnClientApplication createApplication() return applicationId; } + private void addTimelineDelegationToken( + ContainerLaunchContext clc) throws YarnException, IOException { + org.apache.hadoop.security.token.Token timelineDelegationToken = + timelineClient.getDelegationToken( + UserGroupInformation.getCurrentUser().getUserName()); + if (timelineDelegationToken == null) { + return; + } + Credentials credentials = new Credentials(); + DataInputByteBuffer dibb = new DataInputByteBuffer(); + ByteBuffer tokens = clc.getTokens(); + if (tokens != null) { + dibb.reset(tokens); + credentials.readTokenStorageStream(dibb); + tokens.rewind(); + } + // If the timeline delegation token is already in the CLC, no need to add + // one more + for (org.apache.hadoop.security.token.Token token : credentials + .getAllTokens()) { + TokenIdentifier tokenIdentifier = token.decodeIdentifier(); + if (tokenIdentifier instanceof TimelineDelegationTokenIdentifier) { + return; + } + } + credentials.addToken(timelineService, timelineDelegationToken); + if (LOG.isDebugEnabled()) { + LOG.debug("Add timline delegation token into credentials: " + + timelineDelegationToken); + } + DataOutputBuffer dob = new DataOutputBuffer(); + credentials.writeTokenStorageToStream(dob); + tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength()); + clc.setTokens(tokens); + } + @Override public void killApplication(ApplicationId applicationId) throws YarnException, IOException { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTimelineSecurityInfo.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTimelineSecurityInfo.java new file mode 100644 index 0000000..d2b1358 --- /dev/null +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/ClientTimelineSecurityInfo.java @@ -0,0 +1,83 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.security.client; + +import java.lang.annotation.Annotation; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.KerberosInfo; +import org.apache.hadoop.security.SecurityInfo; +import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.security.token.TokenInfo; +import org.apache.hadoop.security.token.TokenSelector; +import org.apache.hadoop.yarn.api.ApplicationHistoryProtocolPB; +import org.apache.hadoop.yarn.conf.YarnConfiguration; + + +@Public +@Unstable +public class ClientTimelineSecurityInfo extends SecurityInfo { + @Override + public KerberosInfo getKerberosInfo(Class protocol, Configuration conf) { + if (!protocol + .equals(ApplicationHistoryProtocolPB.class)) { + return null; + } + return new KerberosInfo() { + + @Override + public Class annotationType() { + return null; + } + + 
@Override + public String serverPrincipal() { + return YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL; + } + + @Override + public String clientPrincipal() { + return null; + } + }; + } + + @Override + public TokenInfo getTokenInfo(Class protocol, Configuration conf) { + if (!protocol + .equals(ApplicationHistoryProtocolPB.class)) { + return null; + } + return new TokenInfo() { + + @Override + public Class annotationType() { + return null; + } + + @Override + public Class> + value() { + return TimelineDelegationTokenSelector.class; + } + }; + } +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenIdentifier.java new file mode 100644 index 0000000..82e0d69 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenIdentifier.java @@ -0,0 +1,63 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.security.client; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier; + +@Public +@Unstable +public class TimelineDelegationTokenIdentifier extends AbstractDelegationTokenIdentifier { + + public static final Text KIND_NAME = new Text("TIMELINE_DELEGATION_TOKEN"); + + public TimelineDelegationTokenIdentifier() { + + } + + /** + * Create a new timeline delegation token identifier + * + * @param owner the effective username of the token owner + * @param renewer the username of the renewer + * @param realUser the real username of the token owner + */ + public TimelineDelegationTokenIdentifier(Text owner, Text renewer, + Text realUser) { + super(owner, renewer, realUser); + } + + @Override + public Text getKind() { + return KIND_NAME; + } + + @InterfaceAudience.Private + public static class Renewer extends Token.TrivialRenewer { + @Override + protected Text getKind() { + return KIND_NAME; + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenSelector.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenSelector.java new file mode 100644 index 0000000..62f708a --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineDelegationTokenSelector.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.security.client; + +import java.util.Collection; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.security.token.TokenSelector; + +@Public +@Unstable +public class TimelineDelegationTokenSelector + implements TokenSelector { + + private static final Log LOG = LogFactory + .getLog(TimelineDelegationTokenSelector.class); + + @SuppressWarnings("unchecked") + public Token selectToken(Text service, + Collection> tokens) { + if (service == null) { + return null; + } + LOG.debug("Looking for a token with service " + service.toString()); + for (Token token : tokens) { + LOG.debug("Token kind is " + token.getKind().toString() + + " and the token's service name is " + token.getService()); + if (TimelineDelegationTokenIdentifier.KIND_NAME.equals(token.getKind()) + && service.equals(token.getService())) { + return (Token) token; + } + } + return null; + } + +} diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineKerberosAuthenticator.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineKerberosAuthenticator.java new file mode 100644 index 0000000..fb1cb8e --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/TimelineKerberosAuthenticator.java @@ -0,0 +1,282 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.security.client; + +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.net.HttpURLConnection; +import java.net.URL; +import java.net.URLEncoder; +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.authentication.client.AuthenticatedURL; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.client.Authenticator; +import org.apache.hadoop.security.authentication.client.KerberosAuthenticator; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.yarn.api.records.timeline.TimelineDelegationTokenResponse; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPut; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; + +/** + * A KerberosAuthenticator subclass that fallback to + * {@link TimelineKerberosAuthenticator}. 
+ */ + +@Private +@Unstable +public class TimelineKerberosAuthenticator extends KerberosAuthenticator { + + public static final String ERROR_EXCEPTION_JSON = "exception"; + public static final String ERROR_CLASSNAME_JSON = "javaClassName"; + public static final String ERROR_MESSAGE_JSON = "message"; + + public static final String OP_PARAM = "op"; + public static final String DELEGATION_PARAM = "delegation"; + public static final String TOKEN_PARAM = "token"; + public static final String RENEWER_PARAM = "renewer"; + public static final String DELEGATION_TOKEN_URL = "url"; + public static final String DELEGATION_TOKEN_EXPIRATION_TIME = + "expirationTime"; + + private static ObjectMapper mapper; + + static { + mapper = new ObjectMapper(); + YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + } + + /** + * Returns the fallback authenticator if the server does not use Kerberos + * SPNEGO HTTP authentication. + * + * @return a {@link TimelineKerberosAuthenticator} instance. + */ + @Override + protected Authenticator getFallBackAuthenticator() { + return new TimelineKerberosAuthenticator(); + } + + /** + * DelegationToken operations. 
+ */ + @Private + public static enum DelegationTokenOperation { + GETDELEGATIONTOKEN(HttpGet.METHOD_NAME, true), + RENEWDELEGATIONTOKEN(HttpPut.METHOD_NAME, true), + CANCELDELEGATIONTOKEN(HttpPut.METHOD_NAME, false); + + private String httpMethod; + private boolean requiresKerberosCredentials; + + private DelegationTokenOperation(String httpMethod, + boolean requiresKerberosCredentials) { + this.httpMethod = httpMethod; + this.requiresKerberosCredentials = requiresKerberosCredentials; + } + + public String getHttpMethod() { + return httpMethod; + } + + public boolean requiresKerberosCredentials() { + return requiresKerberosCredentials; + } + + } + + public static void injectDelegationToken(Map params, + Token dtToken) + throws IOException { + if (dtToken != null) { + params.put(DELEGATION_PARAM, dtToken.encodeToUrlString()); + } + } + + private boolean hasDelegationToken(URL url) { + return url.getQuery().contains(DELEGATION_PARAM + "="); + } + + @Override + public void authenticate(URL url, AuthenticatedURL.Token token) + throws IOException, AuthenticationException { + if (!hasDelegationToken(url)) { + super.authenticate(url, token); + } + } + + public static Token getDelegationToken( + URL url, AuthenticatedURL.Token token, String renewer) throws IOException { + DelegationTokenOperation op = + DelegationTokenOperation.GETDELEGATIONTOKEN; + Map params = new HashMap(); + params.put(OP_PARAM, op.toString()); + params.put(RENEWER_PARAM, renewer); + url = appendParams(url, params); + AuthenticatedURL aUrl = + new AuthenticatedURL(new TimelineKerberosAuthenticator()); + try { + HttpURLConnection conn = aUrl.openConnection(url, token); + conn.setRequestMethod(op.getHttpMethod()); + TimelineDelegationTokenResponse dtRes = + validateAndParseResponse(conn, HttpURLConnection.HTTP_OK); + if (!dtRes.getType().equals(DELEGATION_TOKEN_URL)) { + throw new IOException("The response content is not expected: " + + dtRes.getContent()); + } + String tokenStr = 
dtRes.getContent().toString(); + Token dToken = + new Token(); + dToken.decodeFromUrlString(tokenStr); + return dToken; + } catch (AuthenticationException ex) { + throw new IOException(ex.toString(), ex); + } + } + + public static long renewDelegationToken(URL url, + AuthenticatedURL.Token token, + Token dToken) throws IOException { + Map params = new HashMap(); + params.put(OP_PARAM, + DelegationTokenOperation.RENEWDELEGATIONTOKEN.toString()); + params.put(TOKEN_PARAM, dToken.encodeToUrlString()); + url = appendParams(url, params); + AuthenticatedURL aUrl = + new AuthenticatedURL(new TimelineKerberosAuthenticator()); + try { + HttpURLConnection conn = aUrl.openConnection(url, token); + conn.setRequestMethod( + DelegationTokenOperation.RENEWDELEGATIONTOKEN.getHttpMethod()); + TimelineDelegationTokenResponse dtRes = + validateAndParseResponse(conn, HttpURLConnection.HTTP_OK); + if (!dtRes.getType().equals(DELEGATION_TOKEN_EXPIRATION_TIME)) { + throw new IOException("The response content is not expected: " + + dtRes.getContent()); + } + return Long.valueOf(dtRes.getContent().toString()); + } catch (AuthenticationException ex) { + throw new IOException(ex.toString(), ex); + } + } + + public static void cancelDelegationToken(URL url, + AuthenticatedURL.Token token, + Token dToken) throws IOException { + Map params = new HashMap(); + params.put(OP_PARAM, + DelegationTokenOperation.CANCELDELEGATIONTOKEN.toString()); + params.put(TOKEN_PARAM, dToken.encodeToUrlString()); + url = appendParams(url, params); + AuthenticatedURL aUrl = + new AuthenticatedURL(new TimelineKerberosAuthenticator()); + try { + HttpURLConnection conn = aUrl.openConnection(url, token); + conn.setRequestMethod(DelegationTokenOperation.CANCELDELEGATIONTOKEN + .getHttpMethod()); + validateAndParseResponse(conn, HttpURLConnection.HTTP_OK); + } catch (AuthenticationException ex) { + throw new IOException(ex.toString(), ex); + } + } + + /** + * Convenience method that appends parameters an HTTP URL. 
+ * + * @param url + * the URL to append the parameters to. + * @param params + * the query string parameters. + * + * @return a URL + * + * @throws IOException + * thrown if an IO error occurs. + */ + public static URL appendParams(URL url, Map params) + throws IOException { + StringBuilder sb = new StringBuilder(); + sb.append(url); + String separator = url.toString().contains("?") ? "&" : "?"; + for (Map.Entry entry : params.entrySet()) { + sb.append(separator).append(entry.getKey()).append("="). + append(URLEncoder.encode(entry.getValue(), "UTF8")); + separator = "&"; + } + return new URL(sb.toString()); + } + + /** + * Validates the status of an HttpURLConnection against an + * expected HTTP status code. If the current status code is not the expected + * one it throws an exception with a detail message using Server side error + * messages if available. Otherwise, {@link TimelineDelegationTokenResponse} + * will be parsed and returned. + * + * @param conn + * the HttpURLConnection. + * @param expected + * the expected HTTP status code. 
+ * @return + * @throws IOException + * thrown if the current status code does not match the expected one + * or the JSON response cannot be parsed correctly + */ + private static TimelineDelegationTokenResponse validateAndParseResponse( + HttpURLConnection conn, int expected) throws IOException { + int status = conn.getResponseCode(); + JsonNode json = mapper.readTree(conn.getInputStream()); + if (status != expected) { + try { + String message = json.get(ERROR_MESSAGE_JSON).getTextValue(); + String exception = json.get(ERROR_EXCEPTION_JSON).getTextValue(); + String className = json.get(ERROR_CLASSNAME_JSON).getTextValue(); + + try { + ClassLoader cl = TimelineKerberosAuthenticator.class.getClassLoader(); + Class klass = cl.loadClass(className); + Constructor constr = klass.getConstructor(String.class); + throw (IOException) constr.newInstance(message); + } catch (IOException ex) { + throw ex; + } catch (Exception ex) { + throw new IOException(MessageFormat.format("{0} - {1}", exception, + message)); + } + } catch (IOException ex) { + if (ex.getCause() instanceof IOException) { + throw (IOException) ex.getCause(); + } + throw new IOException( + MessageFormat.format("HTTP status [{0}], {1}", + status, conn.getResponseMessage())); + } + } else { + return mapper.readValue(json, TimelineDelegationTokenResponse.class); + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java index 324c6f6..a62ed48 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/timeline/TimelineUtils.java @@ -22,12 +22,10 @@ import org.apache.hadoop.classification.InterfaceAudience.Public; import 
org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; import org.codehaus.jackson.JsonGenerationException; -import org.codehaus.jackson.map.AnnotationIntrospector; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; -import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion; -import org.codehaus.jackson.xc.JaxbAnnotationIntrospector; /** * The helper class for the timeline module. @@ -41,9 +39,7 @@ static { mapper = new ObjectMapper(); - AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(); - mapper.setAnnotationIntrospector(introspector); - mapper.setSerializationInclusion(Inclusion.NON_NULL); + YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); } /** diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnJacksonJaxbJsonProvider.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnJacksonJaxbJsonProvider.java index ac0ea7f..3cc1aec 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnJacksonJaxbJsonProvider.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnJacksonJaxbJsonProvider.java @@ -49,9 +49,14 @@ public YarnJacksonJaxbJsonProvider() { @Override public ObjectMapper locateMapper(Class type, MediaType mediaType) { ObjectMapper mapper = super.locateMapper(type, mediaType); + configObjectMapper(mapper); + return mapper; + } + + public static void configObjectMapper(ObjectMapper mapper) { AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(); mapper.setAnnotationIntrospector(introspector); mapper.setSerializationInclusion(Inclusion.NON_NULL); - return mapper; } + } \ No newline at end of file diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo index babc2fb..d90a267 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.SecurityInfo @@ -12,6 +12,7 @@ # limitations under the License. # org.apache.hadoop.yarn.security.client.ClientRMSecurityInfo +org.apache.hadoop.yarn.security.client.ClientTimelineSecurityInfo org.apache.hadoop.yarn.security.ContainerManagerSecurityInfo org.apache.hadoop.yarn.security.SchedulerSecurityInfo org.apache.hadoop.yarn.security.admin.AdminSecurityInfo diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenIdentifier hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenIdentifier index d860461..a4ad548 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenIdentifier +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenIdentifier @@ -15,4 +15,5 @@ org.apache.hadoop.yarn.security.ContainerTokenIdentifier org.apache.hadoop.yarn.security.AMRMTokenIdentifier org.apache.hadoop.yarn.security.client.ClientToAMTokenIdentifier org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier +org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier org.apache.hadoop.yarn.security.NMTokenIdentifier diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenRenewer hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenRenewer index dd0b2c4..9fcfa19 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenRenewer +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/META-INF/services/org.apache.hadoop.security.token.TokenRenewer @@ -14,3 +14,4 @@ org.apache.hadoop.yarn.security.AMRMTokenIdentifier$Renewer org.apache.hadoop.yarn.security.ContainerTokenIdentifier$Renewer org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier$Renewer +org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier$Renewer diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java index 731ae14..a68b6a3 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java @@ -18,12 +18,17 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice; +import java.io.IOException; +import java.net.InetSocketAddress; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.source.JvmMetrics; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.CompositeService; import org.apache.hadoop.service.Service; import org.apache.hadoop.util.ExitUtil; @@ -33,8 +38,11 @@ import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineAuthenticationFilterInitializer; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineDelegationTokenSecretManagerService; import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApps; @@ -52,9 +60,11 @@ private static final Log LOG = LogFactory .getLog(ApplicationHistoryServer.class); - ApplicationHistoryClientService ahsClientService; - ApplicationHistoryManager historyManager; - TimelineStore timelineStore; + private ApplicationHistoryClientService ahsClientService; + private ApplicationHistoryManager historyManager; + private TimelineStore timelineStore; + private TimelineACLsManager timelineACLsManager; + private TimelineDelegationTokenSecretManagerService secretManagerService; private WebApp webApp; public 
ApplicationHistoryServer() { @@ -69,13 +79,22 @@ protected void serviceInit(Configuration conf) throws Exception { addService((Service) historyManager); timelineStore = createTimelineStore(conf); addIfService(timelineStore); + timelineACLsManager = createTimelineACLsManager(conf); + secretManagerService = createTimelineDelegationTokenSecretManagerService(conf); + addService(secretManagerService); + + DefaultMetricsSystem.initialize("ApplicationHistoryServer"); + JvmMetrics.initSingleton("ApplicationHistoryServer", null); super.serviceInit(conf); } @Override protected void serviceStart() throws Exception { - DefaultMetricsSystem.initialize("ApplicationHistoryServer"); - JvmMetrics.initSingleton("ApplicationHistoryServer", null); + try { + doSecureLogin(getConfig()); + } catch(IOException ie) { + throw new YarnRuntimeException("Failed to login", ie); + } startWebApp(); super.serviceStart(); @@ -148,27 +167,51 @@ protected TimelineStore createTimelineStore( TimelineStore.class), conf); } + protected TimelineACLsManager createTimelineACLsManager(Configuration conf) { + return new TimelineACLsManager(conf); + } + + protected TimelineDelegationTokenSecretManagerService + createTimelineDelegationTokenSecretManagerService(Configuration conf) { + return new TimelineDelegationTokenSecretManagerService(); + } + protected void startWebApp() { - String bindAddress = WebAppUtils.getAHSWebAppURLWithoutScheme(getConfig()); + Configuration conf = getConfig(); + // Play trick to make the customized filter will only be loaded by the + // timeline server when security is enabled. + if (UserGroupInformation.isSecurityEnabled()) { + String initializers = conf.get("hadoop.http.filter.initializers"); + initializers = + initializers == null || initializers.length() == 0 ? 
"" : "," + + initializers; + if (!initializers.contains( + TimelineAuthenticationFilterInitializer.class.getName())) { + conf.set("hadoop.http.filter.initializers", + TimelineAuthenticationFilterInitializer.class.getName() + + initializers); + } + } + String bindAddress = WebAppUtils.getAHSWebAppURLWithoutScheme(conf); LOG.info("Instantiating AHSWebApp at " + bindAddress); try { + AHSWebApp ahsWebApp = AHSWebApp.getInstance(); + ahsWebApp.setApplicationHistoryManager(historyManager); + ahsWebApp.setTimelineStore(timelineStore); + ahsWebApp.setTimelineACLsManager(timelineACLsManager); + ahsWebApp.setTimelineDelegationTokenSecretManagerService(secretManagerService); webApp = WebApps .$for("applicationhistory", ApplicationHistoryClientService.class, - ahsClientService, "ws") - .with(getConfig()) - .withHttpSpnegoPrincipalKey( - YarnConfiguration.TIMELINE_SERVICE_WEBAPP_SPNEGO_USER_NAME_KEY) - .withHttpSpnegoKeytabKey( - YarnConfiguration.TIMELINE_SERVICE_WEBAPP_SPNEGO_KEYTAB_FILE_KEY) - .at(bindAddress) - .start(new AHSWebApp(historyManager, timelineStore)); + ahsClientService, "ws") + .with(conf).at(bindAddress).start(ahsWebApp); } catch (Exception e) { String msg = "AHSWebApp failed to start."; LOG.error(msg, e); throw new YarnRuntimeException(msg, e); } } + /** * @return ApplicationTimelineStore */ @@ -177,4 +220,27 @@ protected void startWebApp() { public TimelineStore getTimelineStore() { return timelineStore; } + + protected TimelineDelegationTokenSecretManagerService + getTimelineDelegationTokenSecretManagerService() { + return secretManagerService; + } + + private void doSecureLogin(Configuration conf) throws IOException { + InetSocketAddress socAddr = getBindAddress(conf); + SecurityUtil.login(conf, YarnConfiguration.TIMELINE_SERVICE_KEYTAB, + YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL, socAddr.getHostName()); + } + + /** + * Retrieve the timeline server bind address from configuration + * + * @param conf + * @return InetSocketAddress + */ + private static 
InetSocketAddress getBindAddress(Configuration conf) { + return conf.getSocketAddr(YarnConfiguration.TIMELINE_SERVICE_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT); + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java index 86ac1f8..06f3d60 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java @@ -19,7 +19,6 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; @@ -40,8 +39,8 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; -import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity; +import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError; /** @@ -314,15 +313,29 @@ private static TimelineEntity maskFields( entityToReturn.setEntityId(entity.getEntityId()); 
entityToReturn.setEntityType(entity.getEntityType()); entityToReturn.setStartTime(entity.getStartTime()); - entityToReturn.setEvents(fields.contains(Field.EVENTS) ? - entity.getEvents() : fields.contains(Field.LAST_EVENT_ONLY) ? - Arrays.asList(entity.getEvents().get(0)) : null); - entityToReturn.setRelatedEntities(fields.contains(Field.RELATED_ENTITIES) ? - entity.getRelatedEntities() : null); - entityToReturn.setPrimaryFilters(fields.contains(Field.PRIMARY_FILTERS) ? - entity.getPrimaryFilters() : null); - entityToReturn.setOtherInfo(fields.contains(Field.OTHER_INFO) ? - entity.getOtherInfo() : null); + // Deep copy + if (fields.contains(Field.EVENTS)) { + entityToReturn.addEvents(entity.getEvents()); + } else if (fields.contains(Field.LAST_EVENT_ONLY)) { + entityToReturn.addEvent(entity.getEvents().get(0)); + } else { + entityToReturn.setEvents(null); + } + if (fields.contains(Field.RELATED_ENTITIES)) { + entityToReturn.addRelatedEntities(entity.getRelatedEntities()); + } else { + entityToReturn.setRelatedEntities(null); + } + if (fields.contains(Field.PRIMARY_FILTERS)) { + entityToReturn.addPrimaryFilters(entity.getPrimaryFilters()); + } else { + entityToReturn.setPrimaryFilters(null); + } + if (fields.contains(Field.OTHER_INFO)) { + entityToReturn.addOtherInfo(entity.getOtherInfo()); + } else { + entityToReturn.setOtherInfo(null); + } return entityToReturn; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java index 6b50d83..9d6c466 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStore.java @@ -18,12 +18,25 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline; -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.service.Service; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; -@InterfaceAudience.Private -@InterfaceStability.Unstable +@Private +@Unstable public interface TimelineStore extends Service, TimelineReader, TimelineWriter { + + /** + * The system filter which will be automatically added to a + * {@link TimelineEntity}'s primary filter section when storing the entity. + * The filter key is case sensitive. Users are supposed not to use the key + * reserved by the timeline system. 
+ */ + @Private + enum TimelineSystemFilter { + TIMELINE_SYSTEM_FILTER_OWNER + } + } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java new file mode 100644 index 0000000..b52fa1e --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineACLsManager.java @@ -0,0 +1,87 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; + +import java.io.IOException; +import java.util.Set; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore.TimelineSystemFilter; + +import com.google.common.annotations.VisibleForTesting; + +/** + * TimelineACLsManager check the entity level timeline data access. + */ +@Private +public class TimelineACLsManager { + + private static final Log LOG = LogFactory.getLog(TimelineACLsManager.class); + + private boolean aclsEnabled; + + public TimelineACLsManager(Configuration conf) { + aclsEnabled = conf.getBoolean(YarnConfiguration.YARN_ACL_ENABLE, + YarnConfiguration.DEFAULT_YARN_ACL_ENABLE); + } + + public boolean checkAccess(String user, + TimelineEntity entity) throws YarnException, IOException { + if (LOG.isDebugEnabled()) { + LOG.debug("Verifying the access of " + user + + " on the timeline entity " + + new EntityIdentifier(entity.getEntityId(), entity.getEntityType())); + } + + if (!aclsEnabled) { + return true; + } + + Set values = + entity.getPrimaryFilters().get( + TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER.toString()); + if (values == null || values.size() != 1) { + throw new YarnException("Owner information of the timeline entity " + + new EntityIdentifier(entity.getEntityId(), entity.getEntityType()) + + " is corrupted."); + } + String owner = values.iterator().next().toString(); + // TODO: Currently we just check the user is the timeline entity owner. 
In + // the future, we need to check whether the user is admin or is in the + // allowed user/group list + if (user.equals(owner)) { + return true; + } + return false; + } + + @Private + @VisibleForTesting + public void setACLsEnabled(boolean aclsEnabled) { + this.aclsEnabled = aclsEnabled; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilter.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilter.java new file mode 100644 index 0000000..81f6353 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilter.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; + +import java.util.Properties; + +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.authentication.server.AuthenticationFilter; + +@Private +@Unstable +public class TimelineAuthenticationFilter extends AuthenticationFilter { + + @Override + protected Properties getConfiguration(String configPrefix, + FilterConfig filterConfig) throws ServletException { + Properties properties = super.getConfiguration(configPrefix, filterConfig); + if (properties.getProperty(AUTH_TYPE).equals("kerberos")) { + properties.setProperty( + AUTH_TYPE, TimelineKerberosAuthenticationHandler.class.getName()); + } + return properties; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilterInitializer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilterInitializer.java new file mode 100644 index 0000000..faeef71 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineAuthenticationFilterInitializer.java @@ -0,0 +1,124 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; + +import java.io.FileReader; +import java.io.IOException; +import java.io.Reader; +import java.util.HashMap; +import java.util.Map; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.http.FilterContainer; +import org.apache.hadoop.http.FilterInitializer; +import org.apache.hadoop.http.HttpServer2; +import org.apache.hadoop.security.SecurityUtil; + +/** + *

+ * Initializes {@link TimelineAuthenticationFilter} which provides support for + * Kerberos HTTP SPNEGO authentication. + *

+ *

+ * It enables Kerberos HTTP SPNEGO plus delegation token authentication for the + * timeline server. + *

+ * Refer to the core-default.xml file, after the comment 'HTTP + * Authentication' for details on the configuration options. All related + * configuration properties have 'yarn.timeline-service.http.authentication.' as prefix. + */ +public class TimelineAuthenticationFilterInitializer extends FilterInitializer { + + private static final String PREFIX = "yarn.timeline-service.http.authentication."; + + private static final String SIGNATURE_SECRET_FILE = + TimelineAuthenticationFilter.SIGNATURE_SECRET + ".file"; + + /** + *

+ * Initializes {@link TimelineAuthenticationFilter} + *

+ *

+ * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN + * configuration properties prefixed with + * "yarn.timeline-service.http.authentication." + *

+ * + * @param container + * The filter container + * @param conf + * Configuration for run-time parameters + */ + @Override + public void initFilter(FilterContainer container, Configuration conf) { + Map filterConfig = new HashMap(); + + // setting the cookie path to root '/' so it is used for all resources. + filterConfig.put(TimelineAuthenticationFilter.COOKIE_PATH, "/"); + + for (Map.Entry entry : conf) { + String name = entry.getKey(); + if (name.startsWith(PREFIX)) { + String value = conf.get(name); + name = name.substring(PREFIX.length()); + filterConfig.put(name, value); + } + } + + String signatureSecretFile = filterConfig.get(SIGNATURE_SECRET_FILE); + if (signatureSecretFile != null) { + try { + StringBuilder secret = new StringBuilder(); + Reader reader = new FileReader(signatureSecretFile); + int c = reader.read(); + while (c > -1) { + secret.append((char) c); + c = reader.read(); + } + reader.close(); + filterConfig + .put(TimelineAuthenticationFilter.SIGNATURE_SECRET, + secret.toString()); + } catch (IOException ex) { + throw new RuntimeException( + "Could not read HTTP signature secret file: " + + signatureSecretFile); + } + } + + // Resolve _HOST into bind address + String bindAddress = conf.get(HttpServer2.BIND_ADDRESS); + String principal = + filterConfig.get(TimelineKerberosAuthenticationHandler.PRINCIPAL); + if (principal != null) { + try { + principal = SecurityUtil.getServerPrincipal(principal, bindAddress); + } catch (IOException ex) { + throw new RuntimeException( + "Could not resolve Kerberos principal name: " + ex.toString(), ex); + } + filterConfig.put(TimelineKerberosAuthenticationHandler.PRINCIPAL, + principal); + } + + container.addGlobalFilter("Timeline Authentication Filter", + TimelineAuthenticationFilter.class.getName(), + filterConfig); + } +} diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java new file mode 100644 index 0000000..fee9eb4 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineDelegationTokenSecretManagerService.java @@ -0,0 +1,189 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; + +import java.io.ByteArrayInputStream; +import java.io.DataInputStream; +import java.io.IOException; +import java.net.InetSocketAddress; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; +import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; + +/** + * The service wrapper of {@link TimelineDelegationTokenSecretManager} + */ +@Private +@Unstable +public class TimelineDelegationTokenSecretManagerService extends AbstractService { + + private TimelineDelegationTokenSecretManager secretManager = null; + private InetSocketAddress serviceAddr = null; + + public TimelineDelegationTokenSecretManagerService() { + super(TimelineDelegationTokenSecretManagerService.class.getName()); + } + + @Override + protected void serviceInit(Configuration conf) throws Exception { + long secretKeyInterval = + conf.getLong(YarnConfiguration.DELEGATION_KEY_UPDATE_INTERVAL_KEY, + YarnConfiguration.DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT); + long tokenMaxLifetime = + conf.getLong(YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_KEY, + YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT); + long tokenRenewInterval = + conf.getLong(YarnConfiguration.DELEGATION_TOKEN_RENEW_INTERVAL_KEY, + YarnConfiguration.DELEGATION_TOKEN_RENEW_INTERVAL_DEFAULT); + secretManager = new TimelineDelegationTokenSecretManager(secretKeyInterval, + tokenMaxLifetime, tokenRenewInterval, + 
3600000); + secretManager.startThreads(); + + if (YarnConfiguration.useHttps(getConfig())) { + serviceAddr = getConfig().getSocketAddr( + YarnConfiguration.TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_HTTPS_PORT); + } else { + serviceAddr = getConfig().getSocketAddr( + YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_ADDRESS, + YarnConfiguration.DEFAULT_TIMELINE_SERVICE_WEBAPP_PORT); + } + super.init(conf); + } + + @Override + protected void serviceStop() throws Exception { + secretManager.stopThreads(); + super.stop(); + } + + /** + * Creates a delegation token. + * + * @param ugi UGI creating the token. + * @param renewer token renewer. + * @return new delegation token. + * @throws IOException thrown if the token could not be created. + */ + public Token createToken( + UserGroupInformation ugi, String renewer) throws IOException { + renewer = (renewer == null) ? ugi.getShortUserName() : renewer; + String user = ugi.getUserName(); + Text owner = new Text(user); + Text realUser = null; + if (ugi.getRealUser() != null) { + realUser = new Text(ugi.getRealUser().getUserName()); + } + TimelineDelegationTokenIdentifier tokenIdentifier = + new TimelineDelegationTokenIdentifier(owner, new Text(renewer), realUser); + Token token = + new Token(tokenIdentifier, secretManager); + SecurityUtil.setTokenService(token, serviceAddr); + return token; + } + + /** + * Renews a delegation token. + * + * @param token delegation token to renew. + * @param renewer token renewer. + * @throws IOException thrown if the token could not be renewed. + */ + public long renewToken(Token token, + String renewer) throws IOException { + return secretManager.renewToken(token, renewer); + } + + /** + * Cancels a delegation token. + * + * @param token delegation token to cancel. + * @param canceler token canceler. 
+ * @throws IOException thrown if the token could not be canceled. + */ + public void cancelToken(Token token, + String canceler) throws IOException { + secretManager.cancelToken(token, canceler); + } + + /** + * Verifies a delegation token. + * + * @param token delegation token to verify. + * @return the UGI for the token. + * @throws IOException thrown if the token could not be verified. + */ + public UserGroupInformation verifyToken(Token token) + throws IOException { + ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier()); + DataInputStream dis = new DataInputStream(buf); + TimelineDelegationTokenIdentifier id = new TimelineDelegationTokenIdentifier(); + try { + id.readFields(dis); + secretManager.verifyToken(id, token.getPassword()); + } finally { + dis.close(); + } + return id.getUser(); + } + + /** + * Create a timeline secret manager + * + * @param delegationKeyUpdateInterval + * the number of seconds for rolling new secret keys. + * @param delegationTokenMaxLifetime + * the maximum lifetime of the delegation tokens + * @param delegationTokenRenewInterval + * how often the tokens must be renewed + * @param delegationTokenRemoverScanInterval + * how often the tokens are scanned for expired tokens + */ + @Private + @Unstable + public static class TimelineDelegationTokenSecretManager extends + AbstractDelegationTokenSecretManager { + + public TimelineDelegationTokenSecretManager(long delegationKeyUpdateInterval, + long delegationTokenMaxLifetime, long delegationTokenRenewInterval, + long delegationTokenRemoverScanInterval) { + super(delegationKeyUpdateInterval, delegationTokenMaxLifetime, + delegationTokenRenewInterval, delegationTokenRemoverScanInterval); + } + + @Override + public TimelineDelegationTokenIdentifier createIdentifier() { + return new TimelineDelegationTokenIdentifier(); + } + + } + +} diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineKerberosAuthenticationHandler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineKerberosAuthenticationHandler.java new file mode 100644 index 0000000..e87baa0 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TimelineKerberosAuthenticationHandler.java @@ -0,0 +1,233 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; + +import java.io.IOException; +import java.io.Writer; +import java.text.MessageFormat; +import java.util.HashSet; +import java.util.Set; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.core.MediaType; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.server.AuthenticationToken; +import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.yarn.api.records.timeline.TimelineDelegationTokenResponse; +import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier; +import org.apache.hadoop.yarn.security.client.TimelineKerberosAuthenticator; +import org.apache.hadoop.yarn.security.client.TimelineKerberosAuthenticator.DelegationTokenOperation; +import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; +import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.codehaus.jackson.map.ObjectMapper; + +/** + * Server side AuthenticationHandler that authenticates requests + * using the incoming delegation token as a 'delegation' query string parameter. + *

+ * If not delegation token is present in the request it delegates to the + * {@link KerberosAuthenticationHandler} + */ +@Private +@Unstable +public class TimelineKerberosAuthenticationHandler + extends KerberosAuthenticationHandler { + + public static final String TYPE = "kerberos-dt"; + private static final Set DELEGATION_TOKEN_OPS = new HashSet(); + private static final String OP_PARAM = "op"; + private static final String ENTER = System.getProperty("line.separator"); + + private ObjectMapper mapper; + + static { + DELEGATION_TOKEN_OPS.add( + DelegationTokenOperation.GETDELEGATIONTOKEN.toString()); + DELEGATION_TOKEN_OPS.add( + DelegationTokenOperation.RENEWDELEGATIONTOKEN.toString()); + DELEGATION_TOKEN_OPS.add( + DelegationTokenOperation.CANCELDELEGATIONTOKEN.toString()); + } + + public TimelineKerberosAuthenticationHandler() { + super(); + mapper = new ObjectMapper(); + YarnJacksonJaxbJsonProvider.configObjectMapper(mapper); + } + + /** + * Returns authentication type of the handler. + * + * @return delegationtoken-kerberos + */ + @Override + public String getType() { + return TYPE; + } + + @Override + public boolean managementOperation(AuthenticationToken token, + HttpServletRequest request, HttpServletResponse response) + throws IOException, AuthenticationException { + boolean requestContinues = true; + String op = request.getParameter(OP_PARAM); + op = (op != null) ? 
op.toUpperCase() : null; + if (DELEGATION_TOKEN_OPS.contains(op) && + !request.getMethod().equals("OPTIONS")) { + DelegationTokenOperation dtOp = + DelegationTokenOperation.valueOf(op); + if (dtOp.getHttpMethod().equals(request.getMethod())) { + if (dtOp.requiresKerberosCredentials() && token == null) { + response.sendError(HttpServletResponse.SC_UNAUTHORIZED, + MessageFormat.format( + "Operation [{0}] requires SPNEGO authentication established", + dtOp)); + requestContinues = false; + } else { + TimelineDelegationTokenSecretManagerService secretManager = + AHSWebApp.getInstance() + .getTimelineDelegationTokenSecretManagerService(); + try { + TimelineDelegationTokenResponse res = null; + switch (dtOp) { + case GETDELEGATIONTOKEN: + UserGroupInformation ownerUGI = + UserGroupInformation.createRemoteUser(token.getUserName()); + String renewerParam = + request + .getParameter(TimelineKerberosAuthenticator.RENEWER_PARAM); + if (renewerParam == null) { + renewerParam = token.getUserName(); + } + Token dToken = secretManager.createToken(ownerUGI, renewerParam); + res = new TimelineDelegationTokenResponse(); + res.setType(TimelineKerberosAuthenticator.DELEGATION_TOKEN_URL); + res.setContent(dToken.encodeToUrlString()); + break; + case RENEWDELEGATIONTOKEN: + case CANCELDELEGATIONTOKEN: + String tokenParam = + request + .getParameter(TimelineKerberosAuthenticator.TOKEN_PARAM); + if (tokenParam == null) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST, + MessageFormat.format( + "Operation [{0}] requires the parameter [{1}]", + dtOp, TimelineKerberosAuthenticator.TOKEN_PARAM)); + requestContinues = false; + } else { + if (dtOp == DelegationTokenOperation.CANCELDELEGATIONTOKEN) { + Token dt = + new Token(); + dt.decodeFromUrlString(tokenParam); + secretManager.cancelToken(dt, + UserGroupInformation.getCurrentUser().getUserName()); + } else { + Token dt = + new Token(); + dt.decodeFromUrlString(tokenParam); + long expirationTime = + secretManager.renewToken(dt, 
token.getUserName()); + res = new TimelineDelegationTokenResponse(); + res.setType(TimelineKerberosAuthenticator.DELEGATION_TOKEN_EXPIRATION_TIME); + res.setContent(expirationTime); + } + } + break; + } + if (requestContinues) { + response.setStatus(HttpServletResponse.SC_OK); + if (res != null) { + response.setContentType(MediaType.APPLICATION_JSON); + Writer writer = response.getWriter(); + mapper.writeValue(writer, res); + writer.write(ENTER); + writer.flush(); + + } + requestContinues = false; + } + } catch (IOException e) { + throw new AuthenticationException(e.toString(), e); + } + } + } else { + response + .sendError( + HttpServletResponse.SC_BAD_REQUEST, + MessageFormat + .format( + "Wrong HTTP method [{0}] for operation [{1}], it should be [{2}]", + request.getMethod(), dtOp, dtOp.getHttpMethod())); + requestContinues = false; + } + } + return requestContinues; + } + + /** + * Authenticates a request looking for the delegation + * query-string parameter and verifying it is a valid token. If there is not + * delegation query-string parameter, it delegates the + * authentication to the {@link KerberosAuthenticationHandler} unless it is + * disabled. + * + * @param request + * the HTTP client request. + * @param response + * the HTTP client response. + * + * @return the authentication token for the authenticated request. + * @throws IOException + * thrown if an IO error occurred. + * @throws AuthenticationException + * thrown if the authentication failed. 
+ */ + @Override + public AuthenticationToken authenticate(HttpServletRequest request, + HttpServletResponse response) + throws IOException, AuthenticationException { + AuthenticationToken token; + String delegationParam = + request.getParameter(TimelineKerberosAuthenticator.DELEGATION_PARAM); + if (delegationParam != null) { + Token dt = + new Token(); + dt.decodeFromUrlString(delegationParam); + TimelineDelegationTokenSecretManagerService secretManager = + AHSWebApp.getInstance() + .getTimelineDelegationTokenSecretManagerService(); + UserGroupInformation ugi = secretManager.verifyToken(dt); + final String shortName = ugi.getShortUserName(); + // creating a ephemeral token + token = new AuthenticationToken(shortName, ugi.getUserName(), getType()); + token.setExpires(0); + } else { + token = super.authenticate(request, response); + } + return token; + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java index 93065b3..ae2daa2 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java @@ -19,25 +19,80 @@ import static org.apache.hadoop.yarn.util.StringHelper.pajoin; +import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.yarn.server.api.ApplicationContext; import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager; import 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineDelegationTokenSecretManagerService; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; import org.apache.hadoop.yarn.webapp.YarnWebParams; +import com.google.common.annotations.VisibleForTesting; + public class AHSWebApp extends WebApp implements YarnWebParams { - private final ApplicationHistoryManager applicationHistoryManager; - private final TimelineStore timelineStore; + private ApplicationHistoryManager applicationHistoryManager; + private TimelineStore timelineStore; + private TimelineACLsManager timelineACLsManager; + private TimelineDelegationTokenSecretManagerService secretManagerService; + + private static AHSWebApp instance = null; + + public static AHSWebApp getInstance() { + if (instance == null) { + instance = new AHSWebApp(); + } + return instance; + } + + @Private + @VisibleForTesting + public static void resetInstance() { + instance = null; + } + + private AHSWebApp() { + + } + + public ApplicationHistoryManager getApplicationHistoryManager() { + return applicationHistoryManager; + } - public AHSWebApp(ApplicationHistoryManager applicationHistoryManager, - TimelineStore timelineStore) { + public void setApplicationHistoryManager( + ApplicationHistoryManager applicationHistoryManager) { this.applicationHistoryManager = applicationHistoryManager; + } + + public TimelineStore getTimelineStore() { + return timelineStore; + } + + public void setTimelineStore(TimelineStore timelineStore) { this.timelineStore = timelineStore; } + public TimelineACLsManager getTimelineACLsManager() { + return timelineACLsManager; + } + + public void 
setTimelineACLsManager(TimelineACLsManager timelineACLsManager) { + this.timelineACLsManager = timelineACLsManager; + } + + public TimelineDelegationTokenSecretManagerService + getTimelineDelegationTokenSecretManagerService() { + return secretManagerService; + } + + public void setTimelineDelegationTokenSecretManagerService( + TimelineDelegationTokenSecretManagerService secretManagerService) { + this.secretManagerService = secretManagerService; + } + @Override public void setup() { bind(YarnJacksonJaxbJsonProvider.class); @@ -46,6 +101,7 @@ public void setup() { bind(GenericExceptionHandler.class); bind(ApplicationContext.class).toInstance(applicationHistoryManager); bind(TimelineStore.class).toInstance(timelineStore); + bind(TimelineACLsManager.class).toInstance(timelineACLsManager); route("/", AHSController.class); route(pajoin("/apps", APP_STATE), AHSController.class); route(pajoin("/app", APPLICATION_ID), AHSController.class, "app"); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java index 44567ea..18fd942 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java @@ -25,6 +25,7 @@ import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.SortedSet; @@ 
-52,17 +53,21 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; import org.apache.hadoop.yarn.util.timeline.TimelineUtils; import org.apache.hadoop.yarn.webapp.BadRequestException; +import org.apache.hadoop.yarn.webapp.NotFoundException; import com.google.inject.Inject; import com.google.inject.Singleton; @@ -75,10 +80,13 @@ private static final Log LOG = LogFactory.getLog(TimelineWebServices.class); private TimelineStore store; + private TimelineACLsManager timelineACLsManager; @Inject - public TimelineWebServices(TimelineStore store) { + public TimelineWebServices(TimelineStore store, + TimelineACLsManager timelineACLsManager) { this.store = store; + this.timelineACLsManager = timelineACLsManager; } @XmlRootElement(name = "about") @@ -141,6 +149,9 @@ public TimelineEntities getEntities( init(res); TimelineEntities entities = null; try { + EnumSet fieldEnums = parseFieldsStr(fields, ","); + boolean modified 
= extendFields(fieldEnums); + String user = getUser(req); entities = store.getEntities( parseStr(entityType), parseLongStr(limit), @@ -150,7 +161,33 @@ public TimelineEntities getEntities( parseLongStr(fromTs), parsePairStr(primaryFilter, ":"), parsePairsStr(secondaryFilter, ",", ":"), - parseFieldsStr(fields, ",")); + fieldEnums); + if (entities != null) { + Iterator entitiesItr = + entities.getEntities().iterator(); + while (entitiesItr.hasNext()) { + TimelineEntity entity = entitiesItr.next(); + try { + // check ACLs + if (!timelineACLsManager.checkAccess(user, entity)) { + entitiesItr.remove(); + } else { + // clean up system data + if (modified) { + entity.setPrimaryFilters(null); + } else { + cleanupOwnerInfo(entity); + } + } + } catch (YarnException e) { + LOG.error("Error when verifying access for user " + user + + " on the events of the timeline entity " + + new EntityIdentifier(entity.getEntityId(), + entity.getEntityType()), e); + entitiesItr.remove(); + } + } + } } catch (NumberFormatException e) { throw new BadRequestException( "windowStart, windowEnd or limit is not a numeric value."); @@ -182,9 +219,25 @@ public TimelineEntity getEntity( init(res); TimelineEntity entity = null; try { + EnumSet fieldEnums = parseFieldsStr(fields, ","); + boolean modified = extendFields(fieldEnums); entity = store.getEntity(parseStr(entityId), parseStr(entityType), - parseFieldsStr(fields, ",")); + fieldEnums); + if (entity != null) { + // check ACLs + String user = getUser(req); + if (!timelineACLsManager.checkAccess(user, entity)) { + entity = null; + } else { + // clean up the system data + if (modified) { + entity.setPrimaryFilters(null); + } else { + cleanupOwnerInfo(entity); + } + } + } } catch (IllegalArgumentException e) { throw new BadRequestException( "requested invalid field."); @@ -192,9 +245,15 @@ public TimelineEntity getEntity( LOG.error("Error getting entity", e); throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR); + } catch 
(YarnException e) { + LOG.error("Error getting entity", e); + throw new WebApplicationException(e, + Response.Status.INTERNAL_SERVER_ERROR); } if (entity == null) { - throw new WebApplicationException(Response.Status.NOT_FOUND); + throw new NotFoundException("Timeline entity " + + new EntityIdentifier(parseStr(entityId), parseStr(entityType)) + + " is not found"); } return entity; } @@ -217,6 +276,7 @@ public TimelineEvents getEvents( init(res); TimelineEvents events = null; try { + String user = getUser(req); events = store.getEntityTimelines( parseStr(entityType), parseArrayStr(entityId, ","), @@ -224,6 +284,29 @@ public TimelineEvents getEvents( parseLongStr(windowStart), parseLongStr(windowEnd), parseArrayStr(eventType, ",")); + if (events != null) { + Iterator eventsItr = + events.getAllEvents().iterator(); + while (eventsItr.hasNext()) { + TimelineEvents.EventsOfOneEntity eventsOfOneEntity = eventsItr.next(); + try { + TimelineEntity entity = store.getEntity( + eventsOfOneEntity.getEntityId(), + eventsOfOneEntity.getEntityType(), + EnumSet.of(Field.PRIMARY_FILTERS)); + // check ACLs + if (!timelineACLsManager.checkAccess(user, entity)) { + eventsItr.remove(); + } + } catch (Exception e) { + LOG.error("Error when verifying access for user " + user + + " on the events of the timeline entity " + + new EntityIdentifier(eventsOfOneEntity.getEntityId(), + eventsOfOneEntity.getEntityType()), e); + eventsItr.remove(); + } + } + } } catch (NumberFormatException e) { throw new BadRequestException( "windowStart, windowEnd or limit is not a numeric value."); @@ -252,12 +335,60 @@ public TimelinePutResponse postEntities( if (entities == null) { return new TimelinePutResponse(); } + String user = getUser(req); try { List entityIDs = new ArrayList(); + TimelineEntities entitiesToPut = new TimelineEntities(); + List errors = + new ArrayList(); for (TimelineEntity entity : entities.getEntities()) { EntityIdentifier entityID = new EntityIdentifier(entity.getEntityId(), 
entity.getEntityType()); + + // check if there is existing entity + try { + TimelineEntity existingEntity = + store.getEntity(entityID.getId(), entityID.getType(), + EnumSet.of(Field.PRIMARY_FILTERS)); + if (existingEntity != null + && !timelineACLsManager.checkAccess(user, existingEntity)) { + throw new YarnException("The timeline entity " + entityID + + " was not put by " + user + " before"); + } + } catch (Exception e) { + // Skip the entity which already exists and was put by others + LOG.warn("Skip the timeline entity: " + entityID + ", because " + + e.getMessage()); + TimelinePutResponse.TimelinePutError error = + new TimelinePutResponse.TimelinePutError(); + error.setEntityId(entityID.getId()); + error.setEntityType(entityID.getType()); + error.setErrorCode( + TimelinePutResponse.TimelinePutError.ACCESS_DENY); + errors.add(error); + continue; + } + + // inject owner information for the access check + try { + injectOwnerInfo(entity, user); + } catch (YarnException e) { + // Skip the entity which messes up the primary filter and record the + // error + LOG.warn("Skip the timeline entity: " + entityID + ", because " + + e.getMessage()); + TimelinePutResponse.TimelinePutError error = + new TimelinePutResponse.TimelinePutError(); + error.setEntityId(entityID.getId()); + error.setEntityType(entityID.getType()); + error.setErrorCode( + TimelinePutResponse.TimelinePutError.SYSTEM_FILTER_CONFLICT); + errors.add(error); + continue; + } + entityIDs.add(entityID); + entitiesToPut.addEntity(entity); if (LOG.isDebugEnabled()) { LOG.debug("Storing the entity " + entityID + ", JSON-style content: " + TimelineUtils.dumpTimelineRecordtoJSON(entity)); @@ -266,7 +397,10 @@ public TimelinePutResponse postEntities( if (LOG.isDebugEnabled()) { LOG.debug("Storing entities: " + CSV_JOINER.join(entityIDs)); } - return store.put(entities); + TimelinePutResponse response = store.put(entitiesToPut); + // add the errors of timeline system filter key conflict + response.addErrors(errors); 
+ return response; } catch (IOException e) { LOG.error("Error putting entities", e); throw new WebApplicationException(e, @@ -350,6 +484,14 @@ private static NameValuePair parsePairStr(String str, String delimiter) { } } + private static boolean extendFields(EnumSet fieldEnums) { + boolean modified = false; + if (fieldEnums != null && !fieldEnums.contains(Field.PRIMARY_FILTERS)) { + fieldEnums.add(Field.PRIMARY_FILTERS); + modified = true; + } + return modified; + } private static Long parseLongStr(String str) { return str == null ? null : Long.parseLong(str.trim()); } @@ -358,4 +500,34 @@ private static String parseStr(String str) { return str == null ? null : str.trim(); } + private static String getUser(HttpServletRequest req) { + String remoteUser = req.getRemoteUser(); + UserGroupInformation callerUGI = null; + if (remoteUser != null) { + callerUGI = UserGroupInformation.createRemoteUser(remoteUser); + } + return callerUGI == null ? "" : callerUGI.getShortUserName(); + } + + private static void injectOwnerInfo(TimelineEntity timelineEntity, + String owner) throws YarnException { + if (timelineEntity.getPrimaryFilters() != null && + timelineEntity.getPrimaryFilters().containsKey( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER)) { + throw new YarnException( + "User should not use the timeline system filter key: " + + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER); + } + timelineEntity.addPrimaryFilter( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER + .toString(), owner); + } + + private static void cleanupOwnerInfo(TimelineEntity timelineEntity) { + if (timelineEntity.getPrimaryFilters() != null) { + timelineEntity.getPrimaryFilters().remove( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER.toString()); + } + } + } diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java index a35fe46..3f3c08a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java @@ -44,6 +44,7 @@ import org.apache.hadoop.yarn.api.records.ContainerReport; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; import org.junit.After; import org.junit.Before; @@ -74,6 +75,7 @@ public void setup() { @After public void tearDown() throws Exception { + AHSWebApp.resetInstance(); historyServer.stop(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java index d6d20af..5c55bec 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java @@ -26,6 +26,7 @@ import org.apache.hadoop.service.Service.STATE; import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.junit.After; import org.junit.Test; @@ -40,7 +41,7 @@ public void testStartStopServer() throws Exception { Configuration config = new YarnConfiguration(); historyServer.init(config); assertEquals(STATE.INITED, historyServer.getServiceState()); - assertEquals(3, historyServer.getServices().size()); + assertEquals(4, historyServer.getServices().size()); ApplicationHistoryClientService historyService = historyServer.getClientService(); assertNotNull(historyServer.getClientService()); @@ -73,5 +74,6 @@ public void stop() { if (historyServer != null) { historyServer.stop(); } + AHSWebApp.resetInstance(); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java new file mode 100644 index 0000000..8a09795 --- /dev/null +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/security/TestTimelineACLsManager.java @@ -0,0 +1,77 @@ 
+/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; +import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.junit.Assert; +import org.junit.Test; + +public class TestTimelineACLsManager { + + @Test + public void testYarnACLsNotEnabled() throws Exception { + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); + TimelineACLsManager timelineACLsManager = + new TimelineACLsManager(conf); + TimelineEntity entity = new TimelineEntity(); + entity.addPrimaryFilter( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER + .toString(), "owner"); + Assert.assertTrue("Always true when ACLs are not enabled", + timelineACLsManager.checkAccess("user", entity)); + } + + @Test + public void testYarnACLsEnabled() throws Exception { + Configuration conf = new YarnConfiguration(); + 
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + TimelineACLsManager timelineACLsManager = + new TimelineACLsManager(conf); + TimelineEntity entity = new TimelineEntity(); + entity.addPrimaryFilter( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER + .toString(), "owner"); + Assert.assertTrue("Owner should be allowed to access", + timelineACLsManager.checkAccess("owner", entity)); + Assert.assertFalse("Other shouldn't be allowed to access", + timelineACLsManager.checkAccess("other", entity)); + } + + @Test + public void testCorruptedOwnerInfo() throws Exception { + Configuration conf = new YarnConfiguration(); + conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true); + TimelineACLsManager timelineACLsManager = + new TimelineACLsManager(conf); + TimelineEntity entity = new TimelineEntity(); + try { + timelineACLsManager.checkAccess("owner", entity); + Assert.fail("Exception is expected"); + } catch (YarnException e) { + Assert.assertTrue("It's not the exact expected exception", e.getMessage() + .contains("is corrupted.")); + } + } + +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java index 650184b..cc0b93c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java @@ -20,19 +20,32 @@ import static 
org.junit.Assert.assertEquals; +import java.io.IOException; + +import javax.inject.Singleton; +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletRequestWrapper; import javax.ws.rs.core.MediaType; -import org.junit.Assert; - +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents; import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse; -import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore; +import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.security.TimelineACLsManager; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider; +import org.junit.Assert; import org.junit.Test; import com.google.inject.Guice; @@ -50,6 +63,8 @@ public class TestTimelineWebServices extends JerseyTest { private static TimelineStore store; + private static TimelineACLsManager timelineACLsManager; + private static String remoteUser; private long beforeTime; private Injector injector = Guice.createInjector(new ServletModule() { @@ -65,7 +80,12 @@ protected void configureServlets() { Assert.fail(); } bind(TimelineStore.class).toInstance(store); + Configuration conf = new YarnConfiguration(); + 
conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, false); + timelineACLsManager = new TimelineACLsManager(conf); + bind(TimelineACLsManager.class).toInstance(timelineACLsManager); serve("/*").with(GuiceContainer.class); + filter("/*").through(TestFilter.class); } }); @@ -340,8 +360,8 @@ public void testGetEvents() throws Exception { public void testPostEntities() throws Exception { TimelineEntities entities = new TimelineEntities(); TimelineEntity entity = new TimelineEntity(); - entity.setEntityId("test id"); - entity.setEntityType("test type"); + entity.setEntityId("test id 1"); + entity.setEntityType("test type 1"); entity.setStartTime(System.currentTimeMillis()); entities.addEntity(entity); WebResource r = resource(); @@ -355,14 +375,248 @@ public void testPostEntities() throws Exception { Assert.assertEquals(0, putResposne.getErrors().size()); // verify the entity exists in the store response = r.path("ws").path("v1").path("timeline") - .path("test type").path("test id") + .path("test type 1").path("test id 1") .accept(MediaType.APPLICATION_JSON) .get(ClientResponse.class); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); entity = response.getEntity(TimelineEntity.class); Assert.assertNotNull(entity); - Assert.assertEquals("test id", entity.getEntityId()); - Assert.assertEquals("test type", entity.getEntityType()); + Assert.assertEquals("test id 1", entity.getEntityId()); + Assert.assertEquals("test type 1", entity.getEntityType()); + } + + @Test + public void testPostEntitiesWithYarnACLsEnabled() throws Exception { + timelineACLsManager.setACLsEnabled(true); + remoteUser = "tester"; + try { + TimelineEntities entities = new TimelineEntities(); + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId("test id 2"); + entity.setEntityType("test type 2"); + entity.setStartTime(System.currentTimeMillis()); + entities.addEntity(entity); + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") 
+ .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class); + Assert.assertNotNull(putResponse); + Assert.assertEquals(0, putResponse.getErrors().size()); + + // override/append timeline data in the same entity with different user + remoteUser = "other"; + response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + putResponse = response.getEntity(TimelinePutResponse.class); + Assert.assertNotNull(putResponse); + Assert.assertEquals(1, putResponse.getErrors().size()); + Assert.assertEquals(TimelinePutResponse.TimelinePutError.ACCESS_DENY, + putResponse.getErrors().get(0).getErrorCode()); + } finally { + timelineACLsManager.setACLsEnabled(false); + remoteUser = null; + } + } + + @Test + public void testGetEntityWithYarnACLsEnabled() throws Exception { + timelineACLsManager.setACLsEnabled(true); + remoteUser = "tester"; + try { + TimelineEntities entities = new TimelineEntities(); + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId("test id 3"); + entity.setEntityType("test type 3"); + entity.setStartTime(System.currentTimeMillis()); + entities.addEntity(entity); + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + // verify the system data will not be exposed + // 1. 
No field specification + response = r.path("ws").path("v1").path("timeline") + .path("test type 3").path("test id 3") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + entity = response.getEntity(TimelineEntity.class); + Assert.assertNull(entity.getPrimaryFilters().get( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER.toString())); + // 2. other field + response = r.path("ws").path("v1").path("timeline") + .path("test type 3").path("test id 3") + .queryParam("fields", "relatedentities") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + entity = response.getEntity(TimelineEntity.class); + Assert.assertNull(entity.getPrimaryFilters().get( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER.toString())); + // 3. primaryfilters field + response = r.path("ws").path("v1").path("timeline") + .path("test type 3").path("test id 3") + .queryParam("fields", "primaryfilters") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + entity = response.getEntity(TimelineEntity.class); + Assert.assertNull(entity.getPrimaryFilters().get( + TimelineStore.TimelineSystemFilter.TIMELINE_SYSTEM_FILTER_OWNER.toString())); + + // get entity with other user + remoteUser = "other"; + response = r.path("ws").path("v1").path("timeline") + .path("test type 3").path("test id 3") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + assertEquals(ClientResponse.Status.NOT_FOUND, + response.getClientResponseStatus()); + } finally { + timelineACLsManager.setACLsEnabled(false); + remoteUser = null; + } + } + + @Test + public void testGetEntitiesWithYarnACLsEnabled() { + timelineACLsManager.setACLsEnabled(true); + remoteUser = 
"tester"; + try { + TimelineEntities entities = new TimelineEntities(); + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId("test id 4"); + entity.setEntityType("test type 4"); + entity.setStartTime(System.currentTimeMillis()); + entities.addEntity(entity); + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + + remoteUser = "other"; + entities = new TimelineEntities(); + entity = new TimelineEntity(); + entity.setEntityId("test id 5"); + entity.setEntityType("test type 4"); + entity.setStartTime(System.currentTimeMillis()); + entities.addEntity(entity); + r = resource(); + response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + + response = r.path("ws").path("v1").path("timeline") + .path("test type 4") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + entities = response.getEntity(TimelineEntities.class); + assertEquals(1, entities.getEntities().size()); + assertEquals("test type 4", entities.getEntities().get(0).getEntityType()); + assertEquals("test id 5", entities.getEntities().get(0).getEntityId()); + } finally { + timelineACLsManager.setACLsEnabled(false); + remoteUser = null; + } + } + + @Test + public void testGetEventsWithYarnACLsEnabled() { + timelineACLsManager.setACLsEnabled(true); + remoteUser = "tester"; + try { + TimelineEntities entities = new TimelineEntities(); + TimelineEntity entity = new TimelineEntity(); + entity.setEntityId("test id 5"); + entity.setEntityType("test type 5"); + entity.setStartTime(System.currentTimeMillis()); + TimelineEvent event = new TimelineEvent(); + event.setEventType("event type 1"); + event.setTimestamp(System.currentTimeMillis()); + 
entity.addEvent(event); + entities.addEntity(entity); + WebResource r = resource(); + ClientResponse response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + + remoteUser = "other"; + entities = new TimelineEntities(); + entity = new TimelineEntity(); + entity.setEntityId("test id 6"); + entity.setEntityType("test type 5"); + entity.setStartTime(System.currentTimeMillis()); + event = new TimelineEvent(); + event.setEventType("event type 2"); + event.setTimestamp(System.currentTimeMillis()); + entity.addEvent(event); + entities.addEntity(entity); + r = resource(); + response = r.path("ws").path("v1").path("timeline") + .accept(MediaType.APPLICATION_JSON) + .type(MediaType.APPLICATION_JSON) + .post(ClientResponse.class, entities); + + response = r.path("ws").path("v1").path("timeline") + .path("test type 5").path("events") + .queryParam("entityId", "test id 5,test id 6") + .accept(MediaType.APPLICATION_JSON) + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + TimelineEvents events = response.getEntity(TimelineEvents.class); + assertEquals(1, events.getAllEvents().size()); + assertEquals("test id 6", events.getAllEvents().get(0).getEntityId()); + } finally { + timelineACLsManager.setACLsEnabled(false); + remoteUser = null; + } } + @Singleton + private static class TestFilter implements Filter { + + @Override + public void init(FilterConfig filterConfig) throws ServletException { + } + + @Override + public void doFilter(ServletRequest request, ServletResponse response, + FilterChain chain) throws IOException, ServletException { + if (request instanceof HttpServletRequest) { + request = + new TestHttpServletRequestWrapper((HttpServletRequest) request); + } + chain.doFilter(request, response); + } + + @Override + public void destroy() { + } + + } + + private static class TestHttpServletRequestWrapper extends 
HttpServletRequestWrapper { + + public TestHttpServletRequestWrapper(HttpServletRequest request) { + super(request); + } + + @Override + public String getRemoteUser() { + return TestTimelineWebServices.remoteUser; + } + + } }