diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 48b7ccdb6f1064b5a28823e4b43b8ac1ae031179..f4195de3addd2c9db9c6721c21c43be6602dfe56 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -1947,6 +1947,11 @@ private static void addDeprecatedKeys() {
public static final int DEFAULT_CLUSTER_LEVEL_APPLICATION_PRIORITY = 0;
+ public static final String APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES =
+ YARN_PREFIX + "app.attempt.diagnostics.capacity.bytes";
+
+ public static final int DEFAULT_APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES = 65_536;
+
@Private
public static boolean isDistributedNodeLabelConfiguration(Configuration conf) {
return DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE.equals(conf.get(
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index 29994e5b8c44cacda8ee06acae3af2ef46c42888..247d9c53d8877a1ba3124ec3120ecfd7b8767bdc 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -117,6 +117,15 @@
org.apache.hadoop
hadoop-annotations
+
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
org.mockito
mockito-all
@@ -138,11 +147,6 @@
protobuf-java
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-
com.sun.jersey.jersey-test-framework
jersey-test-framework-grizzly2
test
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
new file mode 100644
index 0000000000000000000000000000000000000000..14af4359d9bae8017a927dd6d944f55deae0c131
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.io.Serializable;
+import java.util.Deque;
+
+/**
+ * An {@link Appendable} implementation that considers its {@link #capacity} as upper bound. While
+ * {@link #append(CharSequence) append}ing the lengths of the past input values are gathered in a {@link Deque deque}.
+ *
+ * When {@link #capacity} would be reached on append, if possible, at head of {@link #pastInputValues past input values}, some of
+ * the {@link String}s are {@link StringBuilder#delete(int, int) delete}d first considering:
+ *
+ *
+ * - only whole {@link CharSequence}s {@link #append(CharSequence) append}ed before are deleted
+ * - first-in-first-out, meaning head first semantics
+ * - combined length of all the {@link CharSequence}s deleted in one go should be at least the length of the new value
+ *
+ *
+ * Note that null values are {@link #append(CharSequence) append}ed just like in
+ * {@link StringBuilder#append(CharSequence) original implementation}.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class BoundedAppender implements Appendable, Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private final int capacity;
+ private final StringBuilder pastInputValues = new StringBuilder();
+ private final Deque pastInputLengths = Lists.newLinkedList();
+
+ public BoundedAppender() {
+ this(65_536);
+ }
+
+ public BoundedAppender(final int capacity) {
+ Preconditions.checkArgument(capacity > 0, "capacity should be positive");
+
+ this.capacity = capacity;
+ }
+
+ @Override
+ public BoundedAppender append(CharSequence csq) {
+ csq = ensureNull(csq);
+
+ final int inputLength = csq.length();
+
+ checkAndCut(inputLength);
+ pastInputValues.append(csq);
+ pastInputLengths.add(inputLength);
+
+ return this;
+ }
+
+ private CharSequence ensureNull(final CharSequence csq) {
+ if (csq == null) {
+ return "null";
+ }
+
+ return csq;
+ }
+
+ private boolean shouldCut(final int inputLength) {
+ return lengthToCut(inputLength) > 0;
+ }
+
+ private int lengthToCut(final int inputLength) {
+ return pastInputValues.length() + inputLength - capacity;
+ }
+
+ private void cutAtLeast(final int minimalCutLength) {
+ Preconditions.checkArgument(minimalCutLength > 0, "nothing to cut");
+ Preconditions.checkState(pastInputLengths.size() > 0, "no past input to cut");
+ Preconditions.checkState(pastInputValues.length() >= minimalCutLength,
+ String.format("cannot cut %d, could cut at most %d", pastInputValues.length(), minimalCutLength));
+
+ int lengthCut = minimalCutLength;
+ while (lengthCut > 0) {
+ int actualCutLength = pastInputLengths.poll();
+ pastInputValues.delete(0, actualCutLength);
+ lengthCut -= actualCutLength;
+ }
+ }
+
+ private void checkAndCut(final int inputLength) {
+ Preconditions.checkArgument(inputLength <= capacity,
+ String.format("cannot append with length %d, above capacity %d", inputLength, capacity));
+
+ if (shouldCut(inputLength)) {
+ cutAtLeast(lengthToCut(inputLength));
+ }
+ }
+
+ @Override
+ public BoundedAppender append(CharSequence csq, final int start, final int end) {
+ csq = ensureNull(csq);
+
+ final int inputLength = end - start;
+ Preconditions.checkArgument(start >= 0, "start index should be non-negative");
+ Preconditions.checkArgument(end > 0, "end index should be positive");
+ Preconditions.checkArgument(end <= csq.length(), "end index should not exceed length");
+ Preconditions.checkArgument(inputLength > 0, "end index should come after start index");
+
+ checkAndCut(inputLength);
+ pastInputValues.append(csq, start, end);
+ pastInputLengths.add(inputLength);
+
+ return this;
+ }
+
+ @Override
+ public BoundedAppender append(final char c) {
+ final int inputLength = 1;
+
+ checkAndCut(inputLength);
+ pastInputValues.append(c);
+ pastInputLengths.add(inputLength);
+
+ return this;
+ }
+
+ @Override
+ public String toString() {
+ return pastInputValues.toString();
+ }
+}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 8b3a3affe9e8e7a16fb771b5ba0cf5df52798fce..420c4bc4e87e524d780b575dc207036a731575ac 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2157,4 +2157,13 @@
0
+
+  <property>
+    <description>Defines the capacity of the diagnostics message of an application attempt, in bytes.
+      Needed because ZooKeeper's StateStore cannot accommodate messages of unlimited length.
+    </description>
+    <name>yarn.app.attempt.diagnostics.capacity.bytes</name>
+    <value>65536</value>
+  </property>
+
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/BoundedAppenderTest.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/BoundedAppenderTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..00e4d9f4d6c34ce70eb8496d6e8f8b7bdd5457a2
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/BoundedAppenderTest.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+
+public class BoundedAppenderTest {
+ @Rule
+ public ExpectedException expected = ExpectedException.none();
+
+ @Before
+ public void setUp() throws Exception {
+
+ }
+
+ @Test
+ public void initWithZeroCapacityThrowsException() {
+ expected.expect(IllegalArgumentException.class);
+ expected.expectMessage("capacity should be positive");
+
+ new BoundedAppender(0);
+ }
+
+ @Test
+ public void initWithPositiveCapacitySuccess() {
+ new BoundedAppender(1);
+ }
+
+ @Test
+ public void nothingAppendedNothingRead() {
+ final BoundedAppender boundedAppender = new BoundedAppender();
+
+ assertEquals("nothing appended, nothing read", "", boundedAppender.toString());
+ }
+
+ @Test
+ public void nullAppendedNullStringRead() {
+ final BoundedAppender boundedAppender = new BoundedAppender();
+ boundedAppender.append(null);
+
+ assertEquals("null appended, \"null\" read", "null", boundedAppender.toString());
+ }
+
+ @Test
+ public void tryToAppendAboveCapacityThrowsException() {
+ final BoundedAppender boundedAppender = new BoundedAppender(1);
+
+ expected.expect(IllegalArgumentException.class);
+ expected.expectMessage("cannot append with length 2, above capacity 1");
+
+ boundedAppender.append("ab");
+ }
+
+ @Test
+ public void appendBelowCapacityOnceValueIsReadCorrectly() {
+ final BoundedAppender boundedAppender = new BoundedAppender(2);
+
+ boundedAppender.append("ab");
+
+ assertEquals("value appended is read correctly", "ab", boundedAppender.toString());
+ }
+
+ @Test
+ public void appendValuesBelowCapacityAreReadCorrectlyInFifoOrder() {
+ final BoundedAppender boundedAppender = new BoundedAppender(3);
+
+ boundedAppender.append("ab");
+ boundedAppender.append("cd");
+ boundedAppender.append("e");
+ boundedAppender.append("fg");
+
+ assertEquals("last values appended fitting capacity are read correctly", "efg", boundedAppender.toString());
+ }
+
+ @Test
+ public void appendValuesWithInvalidIndicesThrowsException() {
+ final BoundedAppender boundedAppender = new BoundedAppender(1);
+
+ expected.expect(IllegalArgumentException.class);
+ expected.expectMessage("start index should be non-negative");
+
+ boundedAppender.append("a", -1, 0);
+
+ expected.expectMessage("end index should be positive");
+
+ boundedAppender.append("a", 0, 0);
+
+ expected.expectMessage("end index should not exceed length");
+
+ boundedAppender.append("a", 0, 2);
+
+ expected.expectMessage("end index should come after start index");
+
+ boundedAppender.append("a", 1, 1);
+ }
+
+ @Test
+ public void nullAppendedWithValidIndicesNullRead() {
+ final BoundedAppender boundedAppender = new BoundedAppender(4);
+
+ boundedAppender.append(null, 0, 4);
+
+ assertEquals("null appended with valid indices, \"null\" read", "null", boundedAppender.toString());
+ }
+
+ @Test
+ public void appendValueWithValidIndicesIsReadCorrectly() {
+ final BoundedAppender boundedAppender = new BoundedAppender(2);
+
+ boundedAppender.append("abcd", 1, 3);
+
+ assertEquals("value appended with valid indices is read correctly", "bc", boundedAppender.toString());
+ }
+
+ @Test
+ public void appendValuesWithValidIndicesAreReadCorrectlyInFifoOrder() {
+ final BoundedAppender boundedAppender = new BoundedAppender(3);
+
+ boundedAppender.append("abcd", 1, 3);
+ boundedAppender.append("efgh", 0, 2);
+ boundedAppender.append("i", 0, 1);
+ boundedAppender.append("jk", 0, 2);
+
+ assertEquals("last values appended with valid parameters fitting capacity are read correctly", "ijk",
+ boundedAppender.toString());
+ }
+
+ @Test
+ public void appendOneCharIsReadCorrectly() {
+ final BoundedAppender boundedAppender = new BoundedAppender(1);
+
+ boundedAppender.append('a');
+
+ assertEquals("one char appended is read correctly", "a", boundedAppender.toString());
+ }
+
+ @Test
+ public void appendMultipleCharsAreReadCorrectlyInFifoOrder() {
+ final BoundedAppender boundedAppender = new BoundedAppender(2);
+
+ boundedAppender.append('a');
+ boundedAppender.append('b');
+ boundedAppender.append('c');
+ boundedAppender.append('d');
+
+ assertEquals("multiple chars appended are read correctly, the ones deleted in FIFO order", "cd",
+ boundedAppender.toString());
+ }
+}
\ No newline at end of file
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
index 0914022c258bff0997f1184964cb87bbc47999b3..d9286c713d2b1c716417b9ba2770d4193d0d61b3 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
@@ -36,7 +36,6 @@
import javax.crypto.SecretKey;
-import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -59,6 +58,7 @@
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -101,6 +101,7 @@
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
+import org.apache.hadoop.yarn.util.BoundedAppender;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
import com.google.common.annotations.VisibleForTesting;
@@ -161,7 +162,7 @@
// Set to null initially. Will eventually get set
// if an RMAppAttemptUnregistrationEvent occurs
private FinalApplicationStatus finalStatus = null;
- private final StringBuilder diagnostics = new StringBuilder();
+ private final BoundedAppender diagnostics;
private int amContainerExitStatus = ContainerExitStatus.INVALID;
private Configuration conf;
@@ -454,6 +455,10 @@ public RMAppAttemptImpl(ApplicationAttemptId appAttemptId,
new RMAppAttemptMetrics(applicationAttemptId, rmContext);
this.amReq = amReq;
+
+ this.diagnostics = new BoundedAppender(
+ conf.getInt(YarnConfiguration.APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES,
+ YarnConfiguration.DEFAULT_APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES));
}
@Override