diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeLabelAttributes.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeLabelAttributes.java new file mode 100644 index 0000000..f382215 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeLabelAttributes.java @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.api.records; + +import java.util.Map; + +import org.apache.hadoop.classification.InterfaceAudience.Private; +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Stable; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.util.Records; + +@Private +@Unstable +public abstract class NodeLabelAttributes { + @Private + @Unstable + public static NodeLabelAttributes newInstance(String nodeLabel, + Map<String, String> attribute) { + NodeLabelAttributes request = + Records.newRecord(NodeLabelAttributes.class); + request.setNodeLabel(nodeLabel); + request.setAttributes(attribute); + return request; + } + + @Public + @Stable + public abstract String getNodeLabel(); + + @Private + @Unstable + public abstract void setNodeLabel(String nodeLabel); + + @Public + @Stable + public abstract Map<String, String> getAttributes(); + + @Private + @Unstable + public abstract void setAttributes(Map<String, String> attributes); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/ResourceManagerAdministrationProtocol.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/ResourceManagerAdministrationProtocol.java index 2061aef..15ce2f9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/ResourceManagerAdministrationProtocol.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/ResourceManagerAdministrationProtocol.java @@ -48,6 +48,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesRequest; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse; @@ 
-120,8 +122,8 @@ public UpdateNodeResourceResponse updateNodeResource( @Public @Evolving @Idempotent - public AddToClusterNodeLabelsResponse addToClusterNodeLabels(AddToClusterNodeLabelsRequest request) - throws YarnException, IOException; + public AddToClusterNodeLabelsResponse addToClusterNodeLabels( + AddToClusterNodeLabelsRequest request) throws YarnException, IOException; @Public @Evolving @@ -134,4 +136,10 @@ public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels( @Idempotent public ReplaceLabelsOnNodeResponse replaceLabelsOnNode( ReplaceLabelsOnNodeRequest request) throws YarnException, IOException; + + @Public + @Evolving + @Idempotent + public SetNodeLabelsAttributesResponse setNodeLabelsAttributes( + SetNodeLabelsAttributesRequest request) throws YarnException, IOException; } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SetNodeLabelsAttributesRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SetNodeLabelsAttributesRequest.java new file mode 100644 index 0000000..7177a1d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SetNodeLabelsAttributesRequest.java @@ -0,0 +1,39 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.List; + +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; +import org.apache.hadoop.yarn.util.Records; + +public abstract class SetNodeLabelsAttributesRequest { + public static SetNodeLabelsAttributesRequest newInstance( + List<NodeLabelAttributes> nodeLabelsAttributes) { + SetNodeLabelsAttributesRequest request = + Records.newRecord(SetNodeLabelsAttributesRequest.class); + request.setNodeLabelsAttributes(nodeLabelsAttributes); + return request; + } + + public abstract void setNodeLabelsAttributes( + List<NodeLabelAttributes> nodeLabelsAttributes); + + public abstract List<NodeLabelAttributes> getNodeLabelsAttributes(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SetNodeLabelsAttributesResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SetNodeLabelsAttributesResponse.java new file mode 100644 index 0000000..a20f7a1 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/SetNodeLabelsAttributesResponse.java @@ -0,0 +1,23 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +public class SetNodeLabelsAttributesResponse { + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/resourcemanager_administration_protocol.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/resourcemanager_administration_protocol.proto index 7f54b8e..f1a1ffc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/resourcemanager_administration_protocol.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/resourcemanager_administration_protocol.proto @@ -42,4 +42,5 @@ service ResourceManagerAdministrationProtocolService { rpc addToClusterNodeLabels(AddToClusterNodeLabelsRequestProto) returns (AddToClusterNodeLabelsResponseProto); rpc removeFromClusterNodeLabels(RemoveFromClusterNodeLabelsRequestProto) returns (RemoveFromClusterNodeLabelsResponseProto); rpc replaceLabelsOnNodes(ReplaceLabelsOnNodeRequestProto) returns (ReplaceLabelsOnNodeResponseProto); + rpc setNodeLabelsAttributes(SetNodeLabelsAttributesRequestProto) returns (SetNodeLabelsAttributesResponseProto); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto index 900e349..714591a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto @@ -97,6 +97,14 @@ message ReplaceLabelsOnNodeResponseProto { } +message SetNodeLabelsAttributesRequestProto { + repeated NodeLabelAttributesProto nodeLabelsAttributes = 1; +} + + +message SetNodeLabelsAttributesResponseProto { +} + ////////////////////////////////////////////////////////////////// ///////////// RM Failover related records //////////////////////// ////////////////////////////////////////////////////////////////// diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto index 2edff99..870e6c8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto @@ -244,6 +244,11 @@ message LabelsToNodeIdsProto { repeated NodeIdProto nodeId = 2; } +message NodeLabelAttributesProto { + optional string nodeLabel = 1; + repeated StringStringMapProto attributes = 2; +} + //////////////////////////////////////////////////////////////////////// ////// From AM_RM_Protocol ///////////////////////////////////////////// //////////////////////////////////////////////////////////////////////// diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeLabelAttributesPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeLabelAttributesPBImpl.java new file mode 100644 index 0000000..73e1e0e --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeLabelAttributesPBImpl.java @@ -0,0 +1,169 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.api.records.impl.pb; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelAttributesProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelAttributesProtoOrBuilder; +import org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto; + +public class NodeLabelAttributesPBImpl extends NodeLabelAttributes { + NodeLabelAttributesProto proto = + NodeLabelAttributesProto.getDefaultInstance(); + NodeLabelAttributesProto.Builder builder = null; + Map<String, String> attributes = null; + boolean viaProto = false; + + public NodeLabelAttributesPBImpl() { + builder = NodeLabelAttributesProto.newBuilder(); + } + + public NodeLabelAttributesPBImpl(NodeLabelAttributesProto proto) { + this.proto = proto; + viaProto = true; + } + + public NodeLabelAttributesProto getProto() { + mergeLocalToProto(); + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + private void mergeLocalToBuilder() { + if (this.attributes != null) { + addAttributesToProto(); + } + } + + private void addAttributesToProto() { + maybeInitBuilder(); + builder.clearAttributes(); + if (attributes == null) + return; + Iterable<StringStringMapProto> iterable = + new Iterable<StringStringMapProto>() { + + @Override + public Iterator<StringStringMapProto> iterator() { + return new Iterator<StringStringMapProto>() { + + Iterator<String> keyIter = attributes.keySet().iterator(); + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + @Override + public StringStringMapProto next() { + String key = keyIter.next(); + return StringStringMapProto.newBuilder().setKey(key).setValue( + (attributes.get(key))).build(); + } + + @Override + public boolean hasNext() { + return keyIter.hasNext(); + } + }; + } + }; + builder.addAllAttributes(iterable); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = NodeLabelAttributesProto.newBuilder(proto); + } + viaProto = false; + } + + + @Override + public String getNodeLabel() { + NodeLabelAttributesProtoOrBuilder p = viaProto ? proto : builder; + if (!p.hasNodeLabel()) { + return null; + } + return (p.getNodeLabel()); + } + + @Override + public void setNodeLabel(String nodeLabel) { + maybeInitBuilder(); + if (nodeLabel == null) { + builder.clearNodeLabel(); + return; + } + builder.setNodeLabel(nodeLabel); + } + + @Override + public Map<String, String> getAttributes() { + initAttributes(); + return this.attributes; + } + + private void initAttributes() { + if (this.attributes != null) { + return; + } + NodeLabelAttributesProtoOrBuilder p = viaProto ? 
proto : builder; + List<StringStringMapProto> list = p.getAttributesList(); + this.attributes = new HashMap<String, String>(); + + for (StringStringMapProto c : list) { + this.attributes.put(c.getKey(), c.getValue()); + } + } + + @Override + public void setAttributes(Map<String, String> attributes) { + if (attributes == null) + return; + initAttributes(); + this.attributes.clear(); + this.attributes.putAll(attributes); + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java index e2da664..be05f4f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/CommonNodeLabelsManager.java @@ -24,6 +24,7 @@ import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; @@ -40,6 +41,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; @@ -49,14 +51,24 @@ import org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEvent; import org.apache.hadoop.yarn.nodelabels.event.NodeLabelsStoreEventType; import org.apache.hadoop.yarn.nodelabels.event.RemoveClusterNodeLabels; +import org.apache.hadoop.yarn.nodelabels.event.StoreSetNodeLabelsAttributesEvent; import org.apache.hadoop.yarn.nodelabels.event.StoreNewClusterNodeLabels; import org.apache.hadoop.yarn.nodelabels.event.UpdateNodeToLabelsMappingsEvent; import org.apache.hadoop.yarn.util.resource.Resources; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; public class CommonNodeLabelsManager extends AbstractService { + private static final String TYPE_KEY = "type"; + private static final Set<String> ACCEPT_ATTRIBUTE_KEYS = ImmutableSet + .of(TYPE_KEY); + private static final String EXCLUSIVE_TYPE = "exclusive"; + private static final String SHAREABLE_TYPE = "shareable"; + private static final Set<String> ACCEPT_ATTRIBUTE_TYPE_VALUES = ImmutableSet + .of(EXCLUSIVE_TYPE, SHAREABLE_TYPE); + protected static final Log LOG = LogFactory.getLog(CommonNodeLabelsManager.class); private static final int MAX_LABEL_LENGTH = 255; public static final Set<String> EMPTY_STRING_SET = Collections @@ -181,6 +193,13 @@ protected void handleStoreEvent(NodeLabelsStoreEvent event) { store.updateNodeToLabelsMappings(updateNodeToLabelsMappingsEvent .getNodeToLabels()); break; + case SET_NODE_LABELS_ATTRIBUTES: + StoreSetNodeLabelsAttributesEvent + storeSetNodeLabelsAttributesEventEvent = + (StoreSetNodeLabelsAttributesEvent) event; + store.setNodeLabelsAttributes(storeSetNodeLabelsAttributesEventEvent + .getNodeLabelAttributes()); + break; } } catch (IOException e) { LOG.error("Failed to store label modification to storage"); @@ -777,6 +796,93 @@ public void replaceLabelsOnNode(Map<NodeId, Set<String>> replaceLabelsToNode) readLock.unlock(); } } + + private void checkSetNodeLabelsAttributes( + List<NodeLabelAttributes> nodeLabelsAttributes) throws IOException { + // pre-check + 
for (NodeLabelAttributes attr : nodeLabelsAttributes) { + // check that the node-label exists + NodeLabel label = labelCollections.get(attr.getNodeLabel()); + if (attr.getAttributes().isEmpty()) { + // simply ignore + continue; + } + + if (null == label) { + String message = + String.format( + "Trying to set attributes of a non-existing node-label=%s", + attr.getNodeLabel()); + LOG.error(message); + throw new IOException(message); + } + Map<String, String> attributes = attr.getAttributes(); + + // check attribute keys accepted + Set<String> rejectedKeys = + Sets.difference(attributes.keySet(), ACCEPT_ATTRIBUTE_KEYS); + if (!rejectedKeys.isEmpty()) { + String message = + String.format("Some keys specified in attributes are not accepted" + + " by NodeLabelsManager, rejected keys=[%s], acceptable keys=[%s]", + StringUtils.join(rejectedKeys.iterator(), ","), + StringUtils.join(ACCEPT_ATTRIBUTE_KEYS, ",")); + LOG.error(message); + throw new IOException(message); + } + + // check that the value of the "type" attribute is acceptable + String typeValue = attributes.get(TYPE_KEY); + if (!ACCEPT_ATTRIBUTE_TYPE_VALUES.contains(typeValue)) { + String message = + String.format("Attribute key=%s, value=%s is not accepted, " + + "acceptable values are:[%s]", TYPE_KEY, typeValue, + StringUtils.join( + ACCEPT_ATTRIBUTE_TYPE_VALUES.iterator(), ",")); + LOG.error(message); + throw new IOException(message); + } + } + } + + public void setNodeLabelsAttributes( + List<NodeLabelAttributes> nodeLabelsAttributes) throws IOException { + try { + writeLock.lock(); + checkSetNodeLabelsAttributes(nodeLabelsAttributes); + + for (NodeLabelAttributes attr : nodeLabelsAttributes) { + NodeLabel label = labelCollections.get(attr.getNodeLabel()); + label.setShareable(SHAREABLE_TYPE.equals(attr.getAttributes() + .get(TYPE_KEY))); + } + + if (null != dispatcher && !nodeLabelsAttributes.isEmpty()) { + dispatcher.getEventHandler().handle( + new StoreSetNodeLabelsAttributesEvent(nodeLabelsAttributes)); + } + } finally { + writeLock.unlock(); + } + } + + public boolean isShareableNodeLabel(String nodeLabel) throws IOException { + try { + readLock.lock(); + NodeLabel label = labelCollections.get(nodeLabel); + if (label == null) { + String message = + "Getting is-shareable-node-label, node-label = " + nodeLabel + + ", does not exist."; + LOG.error(message); + throw new IOException(message); + } + return label.getShareable(); + } finally { + readLock.unlock(); + } + } + private void checkAndThrowLabelName(String label) throws IOException { if (label == null || label.isEmpty() || label.length() > MAX_LABEL_LENGTH) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java index 6e685ee..ba439e2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/FileSystemNodeLabelsStore.java @@ -21,6 +21,7 @@ import java.io.EOFException; import java.io.IOException; import java.util.Collection; +import java.util.List; import java.util.Map; import java.util.Set; @@ -34,16 +35,20 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; import org.apache.hadoop.yarn.conf.YarnConfiguration; import 
org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesRequestProto; import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesRequestPBImpl; import com.google.common.collect.Sets; @@ -60,7 +65,7 @@ public FileSystemNodeLabelsStore(CommonNodeLabelsManager mgr) { protected static final String EDITLOG_FILENAME = "nodelabel.editlog"; protected enum SerializedLogType { - ADD_LABELS, NODE_TO_LABELS, REMOVE_LABELS + ADD_LABELS, NODE_TO_LABELS, REMOVE_LABELS, SET_NODE_LABELS_ATTRIBUTES } Path fsWorkingPath; @@ -150,6 +155,16 @@ public void removeClusterNodeLabels(Collection<String> labels) .newHashSet(labels.iterator()))).getProto().writeDelimitedTo(editlogOs); ensureCloseEditlogFile(); } + + @Override + public void setNodeLabelsAttributes(List<NodeLabelAttributes> attributes) + throws IOException { + ensureAppendEditlogFile(); + editlogOs.writeInt(SerializedLogType.SET_NODE_LABELS_ATTRIBUTES.ordinal()); + ((SetNodeLabelsAttributesRequestPBImpl) SetNodeLabelsAttributesRequest + .newInstance(attributes)).getProto().writeDelimitedTo(editlogOs); + ensureCloseEditlogFile(); + } @Override public void recover() throws IOException { @@ -221,6 +236,14 @@ public void recover() throws IOException { mgr.replaceLabelsOnNode(map); break; } + case SET_NODE_LABELS_ATTRIBUTES: { + List<NodeLabelAttributes> attributes = + new SetNodeLabelsAttributesRequestPBImpl( + SetNodeLabelsAttributesRequestProto.parseDelimitedFrom(is)) + .getNodeLabelsAttributes(); + mgr.setNodeLabelsAttributes(attributes); + break; + } } } catch (EOFException e) { // EOF hit, break diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabel.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabel.java index 1765a65..490cccb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabel.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabel.java @@ -30,6 +30,7 @@ private int numActiveNMs; private String labelName; private Set<NodeId> nodeIds; + private boolean shareable = false; public NodeLabel(String labelName) { this(labelName, Resource.newInstance(0, 0), 0); @@ -76,6 +77,14 @@ public String getLabelName() { return labelName; } + public void setShareable(boolean shareable) { + this.shareable = shareable; + } + + public boolean getShareable() { + return this.shareable; + } + public NodeLabel getCopy() { return 
new NodeLabel(labelName, resource, numActiveNMs); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java index 857d81b..7a4095c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/NodeLabelsStore.java @@ -21,11 +21,13 @@ import java.io.Closeable; import java.io.IOException; import java.util.Collection; +import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; public abstract class NodeLabelsStore implements Closeable { protected final CommonNodeLabelsManager mgr; @@ -53,6 +55,12 @@ public abstract void removeClusterNodeLabels(Collection<String> labels) throws IOException; /** + * Set node label attributes + */ + public abstract void setNodeLabelsAttributes( + List<NodeLabelAttributes> attributes) throws IOException; + + /** * Recover labels and node to labels mappings from store * @param conf */ diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEventType.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEventType.java index efa2dbe..c89d01f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEventType.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/NodeLabelsStoreEventType.java @@ -21,5 +21,6 @@ public enum NodeLabelsStoreEventType { REMOVE_LABELS, ADD_LABELS, - STORE_NODE_TO_LABELS + STORE_NODE_TO_LABELS, + SET_NODE_LABELS_ATTRIBUTES } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/StoreSetNodeLabelsAttributesEvent.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/StoreSetNodeLabelsAttributesEvent.java new file mode 100644 index 0000000..df7e21d --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/event/StoreSetNodeLabelsAttributesEvent.java @@ -0,0 +1,36 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.nodelabels.event; + +import java.util.List; + +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; + +public class StoreSetNodeLabelsAttributesEvent extends NodeLabelsStoreEvent { + private List<NodeLabelAttributes> attributes; + + public StoreSetNodeLabelsAttributesEvent(List<NodeLabelAttributes> attributes) { + super(NodeLabelsStoreEventType.SET_NODE_LABELS_ATTRIBUTES); + this.attributes = attributes; + } + + public List<NodeLabelAttributes> getNodeLabelAttributes() { + return attributes; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java index c2d813b..85098dd 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java @@ -40,6 +40,7 @@ import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto; import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocolPB; @@ -61,6 +62,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesRequest; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl; @@ -81,6 +84,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesResponsePBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl; @@ -263,4 +268,18 @@ public ReplaceLabelsOnNodeResponse replaceLabelsOnNode( return null; } } + + @Override + public 
SetNodeLabelsAttributesResponse setNodeLabelsAttributes( + SetNodeLabelsAttributesRequest request) throws YarnException, IOException { + SetNodeLabelsAttributesRequestProto requestProto = + ((SetNodeLabelsAttributesRequestPBImpl) request).getProto(); + try { + return new SetNodeLabelsAttributesResponsePBImpl( + proxy.setNodeLabelsAttributes(null, requestProto)); + } catch (ServiceException e) { + RPCUtil.unwrapAndThrowException(e); + return null; + } + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceManagerAdministrationProtocolPBServiceImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceManagerAdministrationProtocolPBServiceImpl.java index 0eaf581..86957aa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceManagerAdministrationProtocolPBServiceImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceManagerAdministrationProtocolPBServiceImpl.java @@ -42,6 +42,8 @@ import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesResponseProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto; import org.apache.hadoop.yarn.server.api.ResourceManagerAdministrationProtocol; @@ -55,6 +57,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RefreshUserToGroupsMappingsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesRequest; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsResponsePBImpl; @@ -74,6 +78,8 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesResponsePBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl; import 
org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl; @@ -268,4 +274,21 @@ public ReplaceLabelsOnNodeResponseProto replaceLabelsOnNodes( throw new ServiceException(e); } } + + @Override + public SetNodeLabelsAttributesResponseProto setNodeLabelsAttributes( + RpcController controller, SetNodeLabelsAttributesRequestProto proto) + throws ServiceException { + SetNodeLabelsAttributesRequest request = + new SetNodeLabelsAttributesRequestPBImpl(proto); + try { + SetNodeLabelsAttributesResponse response = + real.setNodeLabelsAttributes(request); + return ((SetNodeLabelsAttributesResponsePBImpl) response).getProto(); + } catch (YarnException e) { + throw new ServiceException(e); + } catch (IOException e) { + throw new ServiceException(e); + } + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeLabelAttributesPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeLabelAttributesPBImpl.java new file mode 100644 index 0000000..e69de29 diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SetNodeLabelsAttributesRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SetNodeLabelsAttributesRequestPBImpl.java new file mode 100644 index 0000000..d7a7f8b --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SetNodeLabelsAttributesRequestPBImpl.java @@ -0,0 +1,134 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelAttributesPBImpl; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelAttributesProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesRequestProtoOrBuilder; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesRequest; + +public class SetNodeLabelsAttributesRequestPBImpl extends + SetNodeLabelsAttributesRequest { + SetNodeLabelsAttributesRequestProto proto = + SetNodeLabelsAttributesRequestProto.getDefaultInstance(); + SetNodeLabelsAttributesRequestProto.Builder builder = null; + private List<NodeLabelAttributes> nodeLabelAttributes; + boolean viaProto = false; + + public SetNodeLabelsAttributesRequestPBImpl() { + builder = SetNodeLabelsAttributesRequestProto.newBuilder(); + } + + public SetNodeLabelsAttributesRequestPBImpl( + SetNodeLabelsAttributesRequestProto proto) { + this.proto = proto; + viaProto = true; + } + + public SetNodeLabelsAttributesRequestProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + private void mergeLocalToBuilder() { + if (this.nodeLabelAttributes != null) { + addNodeLabelsAttributesToProto(); + } + } + + private void addNodeLabelsAttributesToProto() { + maybeInitBuilder(); + builder.clearNodeLabelsAttributes(); + List<NodeLabelAttributesProto> protoList = + new ArrayList<NodeLabelAttributesProto>(); + for (NodeLabelAttributes r : this.nodeLabelAttributes) { + protoList.add(convertToProtoFormat(r)); + } + builder.addAllNodeLabelsAttributes(protoList); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = SetNodeLabelsAttributesRequestProto.newBuilder(proto); + } + viaProto = false; + } + + @Override + public void setNodeLabelsAttributes(List<NodeLabelAttributes> attributes) { + maybeInitBuilder(); + if (attributes == null) { + builder.clearNodeLabelsAttributes(); + } + this.nodeLabelAttributes = attributes; + } + + private void initLocalNodeLabelsAttributes() { + SetNodeLabelsAttributesRequestProtoOrBuilder p = viaProto ? 
proto : builder; + List<NodeLabelAttributesProto> attributesProtoList = + p.getNodeLabelsAttributesList(); + this.nodeLabelAttributes = new ArrayList<NodeLabelAttributes>(); + for (NodeLabelAttributesProto r : attributesProtoList) { + this.nodeLabelAttributes.add(convertFromProtoFormat(r)); + } + } + + @Override + public List<NodeLabelAttributes> getNodeLabelsAttributes() { + if (this.nodeLabelAttributes != null) { + return this.nodeLabelAttributes; + } + initLocalNodeLabelsAttributes(); + return this.nodeLabelAttributes; + } + + private NodeLabelAttributes + convertFromProtoFormat(NodeLabelAttributesProto p) { + return new NodeLabelAttributesPBImpl(p); + } + + private NodeLabelAttributesProto convertToProtoFormat(NodeLabelAttributes t) { + return ((NodeLabelAttributesPBImpl) t).getProto(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SetNodeLabelsAttributesResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SetNodeLabelsAttributesResponsePBImpl.java new file mode 100644 index 0000000..96456ac --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/SetNodeLabelsAttributesResponsePBImpl.java @@ -0,0 +1,67 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesResponseProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesResponse; + +public class SetNodeLabelsAttributesResponsePBImpl extends + SetNodeLabelsAttributesResponse { + SetNodeLabelsAttributesResponseProto proto = + SetNodeLabelsAttributesResponseProto.getDefaultInstance(); + SetNodeLabelsAttributesResponseProto.Builder builder = null; + boolean viaProto = false; + + public SetNodeLabelsAttributesResponsePBImpl() { + builder = SetNodeLabelsAttributesResponseProto.newBuilder(); + } + + public SetNodeLabelsAttributesResponsePBImpl( + SetNodeLabelsAttributesResponseProto proto) { + this.proto = proto; + viaProto = true; + } + + public SetNodeLabelsAttributesResponseProto getProto() { + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return getProto().toString().replaceAll("\\n", ", ") + .replaceAll("\\s+", " "); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java index 8b48798..8326ec7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java @@ -33,18 +33,284 @@ import java.util.Random; import java.util.Set; +import org.apache.commons.configuration.tree.NodeAddData; import org.apache.commons.lang.math.LongRange; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.proto.SecurityProtos.*; -import org.apache.hadoop.yarn.api.protocolrecords.*; -import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.*; -import org.apache.hadoop.yarn.api.records.*; -import org.apache.hadoop.yarn.api.records.impl.pb.*; -import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.*; -import org.apache.hadoop.yarn.proto.YarnProtos.*; -import org.apache.hadoop.yarn.proto.YarnServiceProtos.*; -import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.*; +import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto; +import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto; +import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto; +import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto; +import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto; +import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto; +import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; +import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.AllocateRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.AllocateResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl; 
+import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodeLabelsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodeLabelsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerStatusesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerStatusesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetLabelsToNodesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetLabelsToNodesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNodesToLabelsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNodesToLabelsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.MoveApplicationAcrossQueuesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.MoveApplicationAcrossQueuesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationDeleteRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationDeleteResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationSubmissionRequestPBImpl; +import 
org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationSubmissionResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationUpdateRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationUpdateResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainersRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainersResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainersRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainersResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; +import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; +import org.apache.hadoop.yarn.api.records.Container; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; +import org.apache.hadoop.yarn.api.records.ContainerReport; +import org.apache.hadoop.yarn.api.records.ContainerResourceDecrease; +import org.apache.hadoop.yarn.api.records.ContainerResourceIncrease; +import org.apache.hadoop.yarn.api.records.ContainerResourceIncreaseRequest; +import org.apache.hadoop.yarn.api.records.ContainerStatus; +import org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.LogAggregationContext; +import org.apache.hadoop.yarn.api.records.NMToken; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeLabelAttributes; +import org.apache.hadoop.yarn.api.records.NodeReport; +import org.apache.hadoop.yarn.api.records.PreemptionContainer; +import org.apache.hadoop.yarn.api.records.PreemptionContract; +import org.apache.hadoop.yarn.api.records.PreemptionMessage; +import org.apache.hadoop.yarn.api.records.PreemptionResourceRequest; +import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.api.records.QueueInfo; +import org.apache.hadoop.yarn.api.records.QueueState; +import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; +import org.apache.hadoop.yarn.api.records.ReservationDefinition; +import org.apache.hadoop.yarn.api.records.ReservationId; +import org.apache.hadoop.yarn.api.records.ReservationRequest; +import org.apache.hadoop.yarn.api.records.ReservationRequests; +import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest; +import org.apache.hadoop.yarn.api.records.ResourceOption; +import org.apache.hadoop.yarn.api.records.ResourceRequest; +import org.apache.hadoop.yarn.api.records.SerializedException; +import org.apache.hadoop.yarn.api.records.StrictPreemptionContract; +import org.apache.hadoop.yarn.api.records.Token; +import org.apache.hadoop.yarn.api.records.URL; +import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; +import 
org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationResourceUsageReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerLaunchContextPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerResourceDecreasePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerResourceIncreasePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerResourceIncreaseRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerStatusPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NMTokenPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelAttributesPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContainerPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContractPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionMessagePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionResourceRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.QueueUserACLInfoPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourceBlacklistRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourceOptionPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.SerializedExceptionPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.StrictPreemptionContractPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.TokenPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.URLPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.YarnClusterMetricsPBImpl; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerResourceDecreaseProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerResourceIncreaseProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerResourceIncreaseRequestProto; +import 
+import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelAttributesProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.URLProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.SetNodeLabelsAttributesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto;
+import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshAdminAclsRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshAdminAclsResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshQueuesRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshQueuesResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshServiceAclsRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshServiceAclsResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.SetNodeLabelsAttributesResponsePBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl;
+import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl;
 import org.apache.hadoop.yarn.util.resource.Resources;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -212,6 +478,7 @@ public static void setup() throws Exception {
     generateByNewInstance(StrictPreemptionContract.class);
     generateByNewInstance(PreemptionMessage.class);
     generateByNewInstance(StartContainerRequest.class);
+    generateByNewInstance(NodeLabelAttributes.class);
     // genByNewInstance does not apply to QueueInfo, cause
     // it is recursive(has sub queues)
     typeValueCache.put(QueueInfo.class, QueueInfo.newInstance("root", 1.0f,
@@ -1015,4 +1282,22 @@ public void testGetLabelsToNodesResponsePBImpl() throws Exception {
     validatePBImplRecord(GetLabelsToNodesResponsePBImpl.class,
         GetLabelsToNodesResponseProto.class);
   }
+
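+  // The three tests below round-trip the new node-label-attributes records
+  // between their PBImpl wrappers and the generated protobuf messages.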
+  @Test
+  public void testNodeLabelAttributesPBImpl() throws Exception {
+    validatePBImplRecord(NodeLabelAttributesPBImpl.class,
+        NodeLabelAttributesProto.class);
+  }
+
+  @Test
+  public void testSetNodeLabelsAttributesRequestPBImpl() throws Exception {
+    validatePBImplRecord(SetNodeLabelsAttributesRequestPBImpl.class,
+        SetNodeLabelsAttributesRequestProto.class);
+  }
+
+  @Test
+  public void testSetNodeLabelsAttributesResponsePBImpl() throws Exception {
+    validatePBImplRecord(SetNodeLabelsAttributesResponsePBImpl.class,
+        SetNodeLabelsAttributesResponseProto.class);
+  }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java
index 65ea79f..c609ead 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/DummyCommonNodeLabelsManager.java
@@ -20,17 +20,20 @@
 import java.io.IOException;
 import java.util.Collection;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabelAttributes;
 import org.apache.hadoop.yarn.event.InlineDispatcher;

 public class DummyCommonNodeLabelsManager extends CommonNodeLabelsManager {
   Map<NodeId, Set<String>> lastNodeToLabels = null;
   Collection<String> lastAddedlabels = null;
   Collection<String> lastRemovedlabels = null;
+  List<NodeLabelAttributes> lastNodeLabelAttributes = null;

   @Override
   public void initNodeLabelStore(Configuration conf) {
@@ -58,6 +61,12 @@ public void storeNewClusterNodeLabels(Set<String> label) throws IOException {
   }

   @Override
+  public void setNodeLabelsAttributes(List<NodeLabelAttributes> attributes)
+      throws IOException {
+    lastNodeLabelAttributes = attributes;
+  }
+
+  @Override
   public void close() throws IOException {
     // do nothing
   }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
index d05c75c..20d60e1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestCommonNodeLabelsManager.java
@@ -29,6 +29,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabelAttributes;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.After;
 import org.junit.Assert;
@@ -536,4 +537,55 @@ public void testNoMoreThanOneLabelExistedInOneHost() throws IOException {
     Assert.assertTrue("Should failed when #labels > 1 on a host after add",
         failed);
   }
+
+  @Test (timeout = 5000)
+  public void testSetNodeLabelsAttributes() throws IOException {
+    boolean failed = false;
+
+    // should fail: the label doesn't exist yet
+    try {
+      mgr.setNodeLabelsAttributes(Arrays.asList(NodeLabelAttributes.newInstance(
+          "p1", ImmutableMap.of("type", "exclusive"))));
+    } catch (IOException e) {
+      failed = true;
+    }
+    Assert.assertTrue("Should fail since the node label doesn't exist", failed);
+
+    mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
+
+    // should fail: attribute key not supported
+    failed = false;
+    try {
+      mgr.setNodeLabelsAttributes(Arrays.asList(
+          NodeLabelAttributes.newInstance("p1",
+              ImmutableMap.of("type", "exclusive", "unknownType", "exclusive"))));
+    } catch (IOException e) {
+      failed = true;
+    }
+    Assert.assertTrue("Should fail when an attribute key is not accepted", failed);
+
+    // should fail: attribute value not supported
+    failed = false;
+    try {
+      mgr.setNodeLabelsAttributes(Arrays.asList(
+          NodeLabelAttributes.newInstance("p1",
+              ImmutableMap.of("type", "not-supported"))));
+    } catch (IOException e) {
+      failed = true;
+    }
+    Assert.assertTrue("Should fail when an attribute value is not accepted", failed);
+
+    mgr.setNodeLabelsAttributes(Arrays.asList(
+        NodeLabelAttributes.newInstance("p1",
+            ImmutableMap.of("type", "exclusive")),
+        NodeLabelAttributes.newInstance("p2",
+            ImmutableMap.of("type", "shareable"))));
+    Assert.assertEquals("p1", mgr.lastNodeLabelAttributes.get(0).getNodeLabel());
+    Assert.assertEquals("exclusive",
+        mgr.lastNodeLabelAttributes.get(0).getAttributes().get("type"));
+    Assert.assertEquals("shareable",
+        mgr.lastNodeLabelAttributes.get(1).getAttributes().get("type"));
+
+    // Check shareable for p1/p2
+    Assert.assertFalse(mgr.isShareableNodeLabel("p1"));
+    Assert.assertTrue(mgr.isShareableNodeLabel("p2"));
+  }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
index 5cc026a..a5a1f7a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/nodelabels/TestFileSystemNodeLabelsStore.java
@@ -24,6 +24,7 @@
 import java.util.Map;

 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.NodeLabelAttributes;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.InlineDispatcher;
 import org.junit.After;
@@ -188,7 +189,7 @@ public void testEditlogRecover() throws Exception {
   }

   @SuppressWarnings({ "unchecked", "rawtypes" })
-  @Test//(timeout = 10000)
+  @Test (timeout = 10000)
   public void testSerilizationAfterRecovery() throws Exception {
     mgr.addToCluserNodeLabels(toSet("p1", "p2", "p3"));
     mgr.addToCluserNodeLabels(toSet("p4"));
@@ -218,6 +219,16 @@ public void testSerilizationAfterRecovery() throws Exception {
      * p4: n4
      * p6: n6, n7
      */
+
+    mgr.setNodeLabelsAttributes(Arrays.asList(NodeLabelAttributes
+        .newInstance("p2", ImmutableMap.of("type", "shareable"))));
+    mgr.setNodeLabelsAttributes(Arrays.asList(NodeLabelAttributes
+        .newInstance("p6", ImmutableMap.of("type", "shareable"))));
+
+    /*
+     * Set p2/p6 to be shareable
+     */
+
     // shutdown mgr and start a new mgr
     mgr.stop();
@@ -239,6 +250,10 @@ public void testSerilizationAfterRecovery() throws Exception {
         "p4", toSet(toNodeId("n4")),
         "p2", toSet(toNodeId("n2"))));
+    Assert.assertTrue(mgr.isShareableNodeLabel("p2"));
+    Assert.assertFalse(mgr.isShareableNodeLabel("p4"));
+    Assert.assertTrue(mgr.isShareableNodeLabel("p6"));
+
     /*
      * Add label p7,p8 then shutdown
      */
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
index 6180995..b20f01a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java
@@ -77,6 +77,8 @@
 import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse;
+import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesRequest;
+import org.apache.hadoop.yarn.server.api.protocolrecords.SetNodeLabelsAttributesResponse;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
 import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSystem;
@@ -663,6 +665,29 @@ public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
       throw logAndWrapException(ioe, user.getShortUserName(), argName, msg);
     }
   }
+
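+  /**
+   * Admin entry point for updating the attributes of existing node labels.
+   * The call is ACL-checked and rejected while the RM is in standby; the
+   * actual update is delegated to the node label manager and audit-logged.
+   */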
+  @Override
+  public SetNodeLabelsAttributesResponse setNodeLabelsAttributes(
+      SetNodeLabelsAttributesRequest request) throws YarnException, IOException {
+    String argName = "setNodeLabelsAttributes";
+    final String msg = "set attributes of node labels";
+    UserGroupInformation user = checkAcls(argName);
+
+    checkRMStatus(user.getShortUserName(), argName, msg);
+
+    SetNodeLabelsAttributesResponse response =
+        recordFactory.newRecordInstance(SetNodeLabelsAttributesResponse.class);
+
+    try {
+      rmContext.getNodeLabelManager().setNodeLabelsAttributes(
+          request.getNodeLabelsAttributes());
+      RMAuditLogger
+          .logSuccess(user.getShortUserName(), argName, "AdminService");
+      return response;
+    } catch (IOException ioe) {
+      throw logAndWrapException(ioe, user.getShortUserName(), argName, msg);
+    }
+  }

   private void checkRMStatus(String user, String argName, String msg)
       throws StandbyException {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java
index e5abdc9..c6e66d4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/RMNodeLabelsManager.java
@@ -45,7 +45,6 @@
 import com.google.common.collect.ImmutableSet;

 public class RMNodeLabelsManager extends CommonNodeLabelsManager {
-
   protected static class Queue {
     protected Set<String> acccessibleNodeLabels;
     protected Resource resource;
@@ -156,7 +155,7 @@ public void replaceLabelsOnNode(Map<NodeId, Set<String>> replaceLabelsToNode)
       throws IOException {
     try {
       writeLock.lock();
-
+
       // get nodesCollection before edition
       Map<String, Host> before = cloneNodeMap(replaceLabelsToNode.keySet());
@@ -171,7 +170,6 @@ public void replaceLabelsOnNode(Map<NodeId, Set<String>> replaceLabelsToNode)
     } finally {
       writeLock.unlock();
     }
   }
-
   /*
    * Following methods are used for setting if a node is up and running, and it
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java
index b1be525..6e42a3e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/nodelabels/NullRMNodeLabelsManager.java
@@ -20,11 +20,13 @@
 import java.io.IOException;
 import java.util.Collection;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;

 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.NodeLabelAttributes;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.nodelabels.NodeLabelsStore;
@@ -63,6 +65,12 @@ public void storeNewClusterNodeLabels(Set<String> label) throws IOException {
     public void close() throws IOException {
       // do nothing
     }
+
+    @Override
+    public void setNodeLabelsAttributes(List<NodeLabelAttributes> attributes)
+        throws IOException {
+      // do nothing
+    }
   };
 }