diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 7887fbce25063981062dfc72faf0d57a85decc81..4353ae63821478d062b47c1e7f9a49b10ef04347 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2616,6 +2616,11 @@ public static boolean isAclEnabled(Configuration conf) {
   public static final int DEFAULT_CLUSTER_LEVEL_APPLICATION_PRIORITY = 0;
 
+  public static final String APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES =
+      YARN_PREFIX + "app.attempt.diagnostics.capacity.bytes";
+
+  public static final int DEFAULT_APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES = 65_536;
+
   @Private
   public static boolean isDistributedNodeLabelConfiguration(Configuration conf) {
     return DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE.equals(conf.get(
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index 57074448d6e773e2f4e31052a73ea158a92655aa..ab86e640e000b2504f63e583f084fd3df9f8c754 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -108,6 +108,15 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
     </dependency>
+
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
@@ -129,11 +138,6 @@
       <artifactId>protobuf-java</artifactId>
     </dependency>
     <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <groupId>org.bouncycastle</groupId>
       <artifactId>bcprov-jdk16</artifactId>
       <scope>test</scope>
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
new file mode 100644
index 0000000000000000000000000000000000000000..33eac18889387b431f18454818bb7c46e2da5bc3
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import java.io.Serializable;
+import java.util.Deque;
+
+/**
+ * An {@link Appendable} implementation that considers its {@link #capacity}
+ * as an upper bound. While {@link #append(CharSequence) append}ing, the
+ * lengths of the past input values are gathered in a {@link Deque deque}.
+ * <p>
+ * When an append would exceed {@link #capacity}, some of the {@link String}s
+ * at the head of {@link #pastInputValues past input values} are
+ * {@link StringBuilder#delete(int, int) delete}d first, oldest first, until
+ * the new value fits.
+ * <p>
+ * Note that null values are {@link #append(CharSequence) append}ed just like
+ * in the {@link StringBuilder#append(CharSequence) original implementation}.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class BoundedAppender implements Appendable, Serializable, CharSequence {
+  private static final long serialVersionUID = 1L;
+
+  private final int capacity;
+  private final StringBuilder pastInputValues = new StringBuilder();
+  private final Deque<Integer> pastInputLengths = Lists.newLinkedList();
+
+  public BoundedAppender() {
+    this(65_536);
+  }
+
+  public BoundedAppender(final int capacity) {
+    Preconditions.checkArgument(capacity > 0, "capacity should be positive");
+
+    this.capacity = capacity;
+  }
+
+  @Override
+  public BoundedAppender append(CharSequence csq) {
+    csq = ensureNull(csq);
+
+    final int inputLength = csq.length();
+
+    checkAndCut(inputLength);
+    pastInputValues.append(csq);
+    pastInputLengths.add(inputLength);
+
+    return this;
+  }
+
+  private CharSequence ensureNull(final CharSequence csq) {
+    if (csq == null) {
+      return "null";
+    }
+
+    return csq;
+  }
+
+  private boolean shouldCut(final int inputLength) {
+    return lengthToCut(inputLength) > 0;
+  }
+
+  private int lengthToCut(final int inputLength) {
+    return pastInputValues.length() + inputLength - capacity;
+  }
+
+  private void cutAtLeast(final int minimalCutLength) {
+    Preconditions.checkArgument(minimalCutLength > 0, "nothing to cut");
+    Preconditions.checkState(pastInputLengths.size() > 0, "no past input to cut");
+    Preconditions.checkState(pastInputValues.length() >= minimalCutLength,
+        String.format("cannot cut %d, could cut at most %d",
+            minimalCutLength, pastInputValues.length()));
+
+    int lengthCut = minimalCutLength;
+    while (lengthCut > 0) {
+      int actualCutLength = pastInputLengths.poll();
+      pastInputValues.delete(0, actualCutLength);
+      lengthCut -= actualCutLength;
+    }
+  }
+
+  private void checkAndCut(final int inputLength) {
+    Preconditions.checkArgument(inputLength <= capacity,
+        String.format("cannot append with length %d, above capacity %d",
+            inputLength, capacity));
+
+    if (shouldCut(inputLength)) {
+      cutAtLeast(lengthToCut(inputLength));
+    }
+  }
+
+  @Override
+  public BoundedAppender append(CharSequence csq, final int start,
+      final int end) {
+    csq = ensureNull(csq);
+
+    final int inputLength = end - start;
+    Preconditions.checkArgument(start >= 0, "start index should be non-negative");
+    Preconditions.checkArgument(end > 0, "end index should be positive");
+    Preconditions.checkArgument(end <= csq.length(),
+        "end index should not exceed length");
+    Preconditions.checkArgument(inputLength > 0,
+        "end index should come after start index");
+
+    checkAndCut(inputLength);
+    pastInputValues.append(csq, start, end);
+    pastInputLengths.add(inputLength);
+
+    return this;
+  }
+
+  @Override
+  public BoundedAppender append(final char c) {
+    final int inputLength = 1;
+
+    checkAndCut(inputLength);
+    pastInputValues.append(c);
+    pastInputLengths.add(inputLength);
+
+    return this;
+  }
+
+  @Override
+  public int length() {
+    return pastInputValues.length();
+  }
+
+  @Override
+  public char charAt(final int index) {
+    return pastInputValues.charAt(index);
+  }
+
+  @Override
+  public CharSequence subSequence(final int start, final int end) {
+    return pastInputValues.subSequence(start, end);
+  }
+
+  @Override
+  public String toString() {
+    return pastInputValues.toString();
+  }
+}
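
For orientation, a minimal usage sketch of the BoundedAppender added above (illustrative only, not part of the patch; the class name BoundedAppenderSketch is hypothetical). Once the total length of appended values would exceed the capacity, the oldest appended values are dropped whole, in FIFO order, while a single value longer than the capacity is rejected outright.

// Illustrative sketch, not part of the patch: FIFO truncation at capacity.
import org.apache.hadoop.yarn.util.BoundedAppender;

public class BoundedAppenderSketch {
  public static void main(String[] args) {
    BoundedAppender diagnostics = new BoundedAppender(3);

    diagnostics.append("ab");
    diagnostics.append("cd"); // "ab" is cut from the head; "cd" is kept
    diagnostics.append("e");  // still fits: "cde"
    diagnostics.append("fg"); // "cd" is cut; "efg" remains

    System.out.println(diagnostics); // prints "efg"

    // A single value longer than the capacity is rejected.
    try {
      diagnostics.append("long");
    } catch (IllegalArgumentException e) {
      // "cannot append with length 4, above capacity 3"
      System.out.println(e.getMessage());
    }
  }
}
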
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 1e929a8c5007e12d9d8c428880d33ea0e40390bf..5d9b9ecf5729d5c48eb5e5642561eb6589ef3e8b 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -3014,4 +3014,13 @@
     <value>3000</value>
   </property>
 
+  <property>
+    <description>
+      Defines the maximum capacity, in bytes, of the diagnostics message of an application attempt.
+      Needed because the ZooKeeper-based state store cannot accommodate arbitrarily long messages.
+    </description>
+    <name>yarn.app.attempt.diagnostics.capacity.bytes</name>
+    <value>65536</value>
+  </property>
+
 </configuration>
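
The new property can be tuned like any other YARN setting; below is a hypothetical sketch (the class name and the 16 KiB override value are arbitrary examples) of reading and overriding it through the Configuration API, using the key and default introduced in this patch.

// Illustrative sketch, not part of the patch: reading/overriding the new property.
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class DiagnosticsCapacityConfigSketch {
  public static void main(String[] args) {
    YarnConfiguration conf = new YarnConfiguration();

    // Override the 65536-byte default, e.g. for a ZooKeeper setup with a small jute.maxbuffer.
    conf.setInt(YarnConfiguration.APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES, 16_384);

    int capacity = conf.getInt(
        YarnConfiguration.APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES,
        YarnConfiguration.DEFAULT_APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES);
    System.out.println("diagnostics capacity: " + capacity + " bytes");
  }
}
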
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/BoundedAppenderTest.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/BoundedAppenderTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..00e4d9f4d6c34ce70eb8496d6e8f8b7bdd5457a2
--- /dev/null
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/BoundedAppenderTest.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.util;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import static org.junit.Assert.assertEquals;
+
+public class BoundedAppenderTest {
+  @Rule
+  public ExpectedException expected = ExpectedException.none();
+
+  @Before
+  public void setUp() throws Exception {
+
+  }
+
+  @Test
+  public void initWithZeroCapacityThrowsException() {
+    expected.expect(IllegalArgumentException.class);
+    expected.expectMessage("capacity should be positive");
+
+    new BoundedAppender(0);
+  }
+
+  @Test
+  public void initWithPositiveCapacitySuccess() {
+    new BoundedAppender(1);
+  }
+
+  @Test
+  public void nothingAppendedNothingRead() {
+    final BoundedAppender boundedAppender = new BoundedAppender();
+
+    assertEquals("nothing appended, nothing read", "",
+        boundedAppender.toString());
+  }
+
+  @Test
+  public void nullAppendedNullStringRead() {
+    final BoundedAppender boundedAppender = new BoundedAppender();
+    boundedAppender.append(null);
+
+    assertEquals("null appended, \"null\" read", "null",
+        boundedAppender.toString());
+  }
+
+  @Test
+  public void tryToAppendAboveCapacityThrowsException() {
+    final BoundedAppender boundedAppender = new BoundedAppender(1);
+
+    expected.expect(IllegalArgumentException.class);
+    expected.expectMessage("cannot append with length 2, above capacity 1");
+
+    boundedAppender.append("ab");
+  }
+
+  @Test
+  public void appendBelowCapacityOnceValueIsReadCorrectly() {
+    final BoundedAppender boundedAppender = new BoundedAppender(2);
+
+    boundedAppender.append("ab");
+
+    assertEquals("value appended is read correctly", "ab",
+        boundedAppender.toString());
+  }
+
+  @Test
+  public void appendValuesBelowCapacityAreReadCorrectlyInFifoOrder() {
+    final BoundedAppender boundedAppender = new BoundedAppender(3);
+
+    boundedAppender.append("ab");
+    boundedAppender.append("cd");
+    boundedAppender.append("e");
+    boundedAppender.append("fg");
+
+    assertEquals("last values appended fitting capacity are read correctly",
+        "efg", boundedAppender.toString());
+  }
+
+  @Test
+  public void appendValuesWithInvalidIndicesThrowsException() {
+    final BoundedAppender boundedAppender = new BoundedAppender(1);
+
+    expected.expect(IllegalArgumentException.class);
+    expected.expectMessage("start index should be non-negative");
+
+    boundedAppender.append("a", -1, 0);
+
+    expected.expectMessage("end index should be positive");
+
+    boundedAppender.append("a", 0, 0);
+
+    expected.expectMessage("end index should not exceed length");
+
+    boundedAppender.append("a", 0, 2);
+
+    expected.expectMessage("end index should come after start index");
+
+    boundedAppender.append("a", 1, 1);
+  }
+
+  @Test
+  public void nullAppendedWithValidIndicesNullRead() {
+    final BoundedAppender boundedAppender = new BoundedAppender(4);
+
+    boundedAppender.append(null, 0, 4);
+
+    assertEquals("null appended with valid indices, \"null\" read", "null",
+        boundedAppender.toString());
+  }
+
+  @Test
+  public void appendValueWithValidIndicesIsReadCorrectly() {
+    final BoundedAppender boundedAppender = new BoundedAppender(2);
+
+    boundedAppender.append("abcd", 1, 3);
+
+    assertEquals("value appended with valid indices is read correctly", "bc",
+        boundedAppender.toString());
+  }
+
+  @Test
+  public void appendValuesWithValidIndicesAreReadCorrectlyInFifoOrder() {
+    final BoundedAppender boundedAppender = new BoundedAppender(3);
+
+    boundedAppender.append("abcd", 1, 3);
+    boundedAppender.append("efgh", 0, 2);
+    boundedAppender.append("i", 0, 1);
boundedAppender.append("jk", 0, 2); + + assertEquals("last values appended with valid parameters fitting capacity are read correctly", "ijk", + boundedAppender.toString()); + } + + @Test + public void appendOneCharIsReadCorrectly() { + final BoundedAppender boundedAppender = new BoundedAppender(1); + + boundedAppender.append('a'); + + assertEquals("one char appended is read correctly", "a", boundedAppender.toString()); + } + + @Test + public void appendMultipleCharsAreReadCorrectlyInFifoOrder() { + final BoundedAppender boundedAppender = new BoundedAppender(2); + + boundedAppender.append('a'); + boundedAppender.append('b'); + boundedAppender.append('c'); + boundedAppender.append('d'); + + assertEquals("multiple chars appended are read correctly, the ones deleted in FIFO order", "cd", + boundedAppender.toString()); + } +} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java index ab84985edcbc2a6ee34de90ec9de2c1edfbfcf1d..8eb5ed64110d9c50b27c1e2a4f274d4c6beaf5a8 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java @@ -109,6 +109,7 @@ import org.apache.hadoop.yarn.state.SingleArcTransition; import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; +import org.apache.hadoop.yarn.util.BoundedAppender; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; import com.google.common.annotations.VisibleForTesting; @@ -171,7 +172,7 @@ // Set to null initially. Will eventually get set // if an RMAppAttemptUnregistrationEvent occurs private FinalApplicationStatus finalStatus = null; - private final StringBuilder diagnostics = new StringBuilder(); + private final BoundedAppender diagnostics; private int amContainerExitStatus = ContainerExitStatus.INVALID; private Configuration conf; @@ -518,6 +519,10 @@ public RMAppAttemptImpl(ApplicationAttemptId appAttemptId, this.amReq = amReq; this.blacklistedNodesForAM = amBlacklistManager; + + this.diagnostics = new BoundedAppender( + conf.getInt(YarnConfiguration.APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES, + YarnConfiguration.DEFAULT_APP_ATTEMPT_DIAGNOSTICS_CAPACITY_BYTES)); } @Override