diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java index 553ba70..b6fe7be 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; +import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapred.JobPriority; @@ -485,7 +486,8 @@ public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo (queueInfo.getMaximumCapacity() < 0 ? "UNDEFINED" : queueInfo.getMaximumCapacity() * 100) + ", CurrentCapacity: " + queueInfo.getCurrentCapacity() * 100, fromYarn(queueInfo.getQueueState()), - TypeConverter.fromYarnApps(queueInfo.getApplications(), conf)); + TypeConverter.fromYarnApps(queueInfo.getApplications(), conf), + queueInfo.getNodeLabels(), queueInfo.getDefaultNodeLabelExpression()); List childQueues = new ArrayList(); for(org.apache.hadoop.yarn.api.records.QueueInfo childQueue : queueInfo.getChildQueues()) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java index 097e338..44ab191 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java @@ -135,6 +135,14 @@ void printJobQueueInfo(JobQueueInfo jobQueueInfo, Writer writer, jobQueueInfo.getQueueState())); writer.write(String.format(prefix + "Scheduling Info : %s \n", jobQueueInfo.getSchedulingInfo())); + if (jobQueueInfo.getLabels() != null) { + writer.write(String.format(prefix + "Labels : %s \n", + jobQueueInfo.getLabels())); + } + if (jobQueueInfo.getDefaultLabelExpression() != null) { + writer.write(String.format(prefix + "DefaultLabelExpression : %s \n", + jobQueueInfo.getDefaultLabelExpression())); + } List childQueues = jobQueueInfo.getChildren(); if (childQueues != null && childQueues.size() > 0) { for (int i = 0; i < childQueues.size(); i++) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java index 67b73ce..4025967 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueInfo.java @@ -60,6 +60,8 @@ public JobQueueInfo(String queueName, String schedulingInfo) { setQueueChildren(queue.getQueueChildren()); setProperties(queue.getProperties()); setJobStatuses(queue.getJobStatuses()); + 
setLabels(queue.getLabels()); + setDefaultLabelExpression(queue.getDefaultLabelExpression()); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/QueueInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/QueueInfo.java index 6e6ce9e..46a9c51 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/QueueInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/QueueInfo.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.Properties; import org.apache.hadoop.classification.InterfaceAudience; @@ -54,6 +55,10 @@ private List children; private Properties props; + + private Set labels; + + private String defaultLabelExpression; /** * Default constructor for QueueInfo. @@ -79,6 +84,13 @@ public QueueInfo(String queueName, String schedulingInfo) { this.queueName = queueName; this.schedulingInfo = schedulingInfo; } + + public QueueInfo(String queueName, String schedulingInfo, QueueState state, + JobStatus[] stats) { + this(queueName, schedulingInfo); + this.queueState = state; + this.stats = stats; + } /** * @@ -86,12 +98,15 @@ public QueueInfo(String queueName, String schedulingInfo) { * @param schedulingInfo * @param state * @param stats + * @param labels + * @param defaultLabelExpression */ public QueueInfo(String queueName, String schedulingInfo, QueueState state, - JobStatus[] stats) { - this(queueName, schedulingInfo); - this.queueState = state; - this.stats = stats; + JobStatus[] stats, Set labels, + String defaultLabelExpression) { + this(queueName, schedulingInfo, state, stats); + this.labels = labels; + this.defaultLabelExpression = defaultLabelExpression; } /** @@ -189,6 +204,22 @@ protected void setProperties(Properties props) { return stats; } + public Set getLabels() { + return labels; + } + + public String getDefaultLabelExpression() { + return defaultLabelExpression; + } + + public void setLabels(Set labels) { + this.labels = labels; + } + + public void setDefaultLabelExpression(String defaultLabelExpression) { + this.defaultLabelExpression = defaultLabelExpression; + } + @Override public void readFields(DataInput in) throws IOException { queueName = StringInterner.weakIntern(Text.readString(in)); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 9419d03..30685f7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -320,6 +320,7 @@ private LocalResource createApplicationResource(FileContext fs, Path p, LocalRes return rsrc; } + @SuppressWarnings("deprecation") public ApplicationSubmissionContext createApplicationSubmissionContext( Configuration jobConf, String jobSubmitDir, Credentials ts) throws IOException { diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/QueueInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/QueueInfo.java
index 7146db2..ff4f4cc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/QueueInfo.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/QueueInfo.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.api.records;
 
 import java.util.List;
+import java.util.Set;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
@@ -48,13 +49,24 @@
 @Public
 @Stable
 public abstract class QueueInfo {
-  
+
   @Private
   @Unstable
   public static QueueInfo newInstance(String queueName, float capacity,
       float maximumCapacity, float currentCapacity,
       List<QueueInfo> childQueues, List<ApplicationReport> applications,
       QueueState queueState) {
+    return newInstance(queueName, capacity, maximumCapacity, currentCapacity,
+        childQueues, applications, queueState, null, null);
+  }
+
+  @Private
+  @Unstable
+  public static QueueInfo newInstance(String queueName, float capacity,
+      float maximumCapacity, float currentCapacity,
+      List<QueueInfo> childQueues, List<ApplicationReport> applications,
+      QueueState queueState, Set<String> nodeLabels,
+      String defaultNodeLabelExpression) {
     QueueInfo queueInfo = Records.newRecord(QueueInfo.class);
     queueInfo.setQueueName(queueName);
     queueInfo.setCapacity(capacity);
@@ -63,6 +75,8 @@ public static QueueInfo newInstance(String queueName, float capacity,
     queueInfo.setChildQueues(childQueues);
     queueInfo.setApplications(applications);
     queueInfo.setQueueState(queueState);
+    queueInfo.setNodeLabels(nodeLabels);
+    queueInfo.setDefaultNodeLabelExpression(defaultNodeLabelExpression);
     return queueInfo;
   }
 
@@ -149,4 +163,29 @@ public static QueueInfo newInstance(String queueName, float capacity,
   @Private
   @Unstable
   public abstract void setQueueState(QueueState queueState);
+
+  /**
+   * Get the node labels of the queue.
+   * @return node labels of the queue
+   */
+  @Public
+  @Stable
+  public abstract Set<String> getNodeLabels();
+
+  @Private
+  @Unstable
+  public abstract void setNodeLabels(Set<String> labels);
+
+  /**
+   * Get the default node label expression of the queue
+   * @return default node label expression of the queue
+   */
+  @Public
+  @Stable
+  public abstract String getDefaultNodeLabelExpression();
+
+  @Public
+  @Stable
+  public abstract void setDefaultNodeLabelExpression(
+      String defaultLabelExpression);
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsRequest.java
new file mode 100644
index 0000000..e2eaff1
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsRequest.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.
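A minimal caller-side sketch of the per-queue node-label fields added to QueueInfo above; obtaining the QueueInfo (for example via YarnClient#getQueueInfo) is assumed and not shown:

import org.apache.hadoop.yarn.api.records.QueueInfo;

public class QueueLabelsPrinter {
  // Prints the two new fields alongside the queue name.
  public static void print(QueueInfo queueInfo) {
    System.out.println("Queue " + queueInfo.getQueueName()
        + ", node labels: " + queueInfo.getNodeLabels()
        + ", default node label expression: "
        + queueInfo.getDefaultNodeLabelExpression());
  }
}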
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class AddToClusterNodeLabelsRequest { + public static AddToClusterNodeLabelsRequest newInstance(Set labels) { + AddToClusterNodeLabelsRequest request = + Records.newRecord(AddToClusterNodeLabelsRequest.class); + request.setLabels(labels); + return request; + } + + @Public + @Evolving + public abstract void setLabels(Set labels); + + @Public + @Evolving + public abstract Set getLabels(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsResponse.java new file mode 100644 index 0000000..7e70ca9 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/AddToClusterNodeLabelsResponse.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
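A minimal usage sketch for the AddToClusterNodeLabelsRequest record introduced above; the label names are placeholders, and the admin-protocol call that would actually send the request is outside this patch excerpt:

import java.util.Set;

import com.google.common.collect.Sets;

import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest;

public class AddLabelsRequestExample {
  public static AddToClusterNodeLabelsRequest build() {
    // Placeholder label names; Records.newRecord(...) inside newInstance
    // resolves to the PB implementation added later in this patch.
    Set<String> labels = Sets.newHashSet("GPU", "LARGE_MEM");
    return AddToClusterNodeLabelsRequest.newInstance(labels);
  }
}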
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class AddToClusterNodeLabelsResponse { + public static AddToClusterNodeLabelsResponse newInstance() { + return Records.newRecord(AddToClusterNodeLabelsResponse.class); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetClusterNodeLabelsRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetClusterNodeLabelsRequest.java new file mode 100644 index 0000000..208fe77 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetClusterNodeLabelsRequest.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class GetClusterNodeLabelsRequest { + public static GetClusterNodeLabelsRequest newInstance() { + return Records.newRecord(GetClusterNodeLabelsRequest.class); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetClusterNodeLabelsResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetClusterNodeLabelsResponse.java new file mode 100644 index 0000000..b0e6470 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetClusterNodeLabelsResponse.java @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class GetClusterNodeLabelsResponse { + public static GetClusterNodeLabelsResponse newInstance(Set labels) { + GetClusterNodeLabelsResponse request = + Records.newRecord(GetClusterNodeLabelsResponse.class); + request.setLabels(labels); + return request; + } + + @Public + @Evolving + public abstract void setLabels(Set labels); + + @Public + @Evolving + public abstract Set getLabels(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetNodeToLabelsRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetNodeToLabelsRequest.java new file mode 100644 index 0000000..56d6587 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetNodeToLabelsRequest.java @@ -0,0 +1,27 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import org.apache.hadoop.yarn.util.Records; + +public abstract class GetNodeToLabelsRequest { + public static GetNodeToLabelsRequest newInstance() { + return Records.newRecord(GetNodeToLabelsRequest.class); + } +} \ No newline at end of file diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetNodeToLabelsResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetNodeToLabelsResponse.java new file mode 100644 index 0000000..c8b40b5 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/GetNodeToLabelsResponse.java @@ -0,0 +1,45 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. 
You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.Map; +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.util.Records; + +public abstract class GetNodeToLabelsResponse { + public static GetNodeToLabelsResponse newInstance( + Map> map) { + GetNodeToLabelsResponse response = + Records.newRecord(GetNodeToLabelsResponse.class); + response.setNodeToLabels(map); + return response; + } + + @Public + @Evolving + public abstract void setNodeToLabels(Map> map); + + @Public + @Evolving + public abstract Map> getNodeToLabels(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/NodeIdToLabels.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/NodeIdToLabels.java new file mode 100644 index 0000000..7b901c8 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/NodeIdToLabels.java @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
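A small sketch of consuming the GetNodeToLabelsResponse record above, assuming the response has already been obtained from the ResourceManager:

import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.api.protocolrecords.GetNodeToLabelsResponse;

public class NodeToLabelsPrinter {
  // Walks the node -> labels mapping carried by the response.
  public static void print(GetNodeToLabelsResponse response) {
    Map<NodeId, Set<String>> nodeToLabels = response.getNodeToLabels();
    for (Map.Entry<NodeId, Set<String>> entry : nodeToLabels.entrySet()) {
      System.out.println(entry.getKey() + " -> " + entry.getValue());
    }
  }
}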
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.List; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class NodeIdToLabels { + @Public + @Evolving + public static NodeIdToLabels newInstance(NodeId node, List labels) { + NodeIdToLabels record = Records.newRecord(NodeIdToLabels.class); + record.setLabels(labels); + record.setNodeId(node); + return record; + } + + @Public + @Evolving + public abstract void setNodeId(NodeId node); + + @Public + @Evolving + public abstract NodeId getNodeId(); + + @Public + @Evolving + public abstract void setLabels(List labels); + + @Public + @Evolving + public abstract List getLabels(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RemoveFromClusterNodeLabelsRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RemoveFromClusterNodeLabelsRequest.java new file mode 100644 index 0000000..96297de --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RemoveFromClusterNodeLabelsRequest.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
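A short sketch of building the NodeIdToLabels record above; the host, port, and label values are placeholders:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeIdToLabels;

public class NodeIdToLabelsExample {
  public static NodeIdToLabels build() {
    NodeId node = NodeId.newInstance("host1.example.com", 45454);
    List<String> labels = Arrays.asList("GPU");
    return NodeIdToLabels.newInstance(node, labels);
  }
}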
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class RemoveFromClusterNodeLabelsRequest { + public static RemoveFromClusterNodeLabelsRequest newInstance( + Set labels) { + RemoveFromClusterNodeLabelsRequest request = + Records.newRecord(RemoveFromClusterNodeLabelsRequest.class); + request.setLabels(labels); + return request; + } + + @Public + @Evolving + public abstract void setLabels(Set labels); + + @Public + @Evolving + public abstract Set getLabels(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RemoveFromClusterNodeLabelsResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RemoveFromClusterNodeLabelsResponse.java new file mode 100644 index 0000000..de8867c --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/RemoveFromClusterNodeLabelsResponse.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class RemoveFromClusterNodeLabelsResponse { + public static RemoveFromClusterNodeLabelsResponse newInstance() { + return Records.newRecord(RemoveFromClusterNodeLabelsResponse.class); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/ReplaceLabelsOnNodeRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/ReplaceLabelsOnNodeRequest.java new file mode 100644 index 0000000..28e261a --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/ReplaceLabelsOnNodeRequest.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import java.util.Map; +import java.util.Set; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class ReplaceLabelsOnNodeRequest { + public static ReplaceLabelsOnNodeRequest newInstance( + Map> map) { + ReplaceLabelsOnNodeRequest request = + Records.newRecord(ReplaceLabelsOnNodeRequest.class); + request.setNodeToLabels(map); + return request; + } + + @Public + @Evolving + public abstract void setNodeToLabels(Map> map); + + @Public + @Evolving + public abstract Map> getNodeToLabels(); +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/ReplaceLabelsOnNodeResponse.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/ReplaceLabelsOnNodeResponse.java new file mode 100644 index 0000000..f087cc5 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/ReplaceLabelsOnNodeResponse.java @@ -0,0 +1,31 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
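A minimal sketch of building the ReplaceLabelsOnNodeRequest record above for a single node; host, port, and label values are placeholders, and the RPC that would carry the request is not shown:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.Sets;

import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest;

public class ReplaceLabelsRequestExample {
  public static ReplaceLabelsOnNodeRequest build() {
    Map<NodeId, Set<String>> nodeToLabels = new HashMap<NodeId, Set<String>>();
    nodeToLabels.put(NodeId.newInstance("host1.example.com", 45454),
        Sets.newHashSet("GPU"));
    return ReplaceLabelsOnNodeRequest.newInstance(nodeToLabels);
  }
}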
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords; + +import org.apache.hadoop.classification.InterfaceAudience.Public; +import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.apache.hadoop.yarn.util.Records; + +@Public +@Evolving +public abstract class ReplaceLabelsOnNodeResponse { + public static ReplaceLabelsOnNodeResponse newInstance() { + return Records.newRecord(ReplaceLabelsOnNodeResponse.class); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto index 4637f03..b7649e0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/server/yarn_server_resourcemanager_service_protos.proto @@ -75,6 +75,46 @@ message UpdateNodeResourceRequestProto { message UpdateNodeResourceResponseProto { } +message AddToClusterNodeLabelsRequestProto { + repeated string labels = 1; +} + +message AddToClusterNodeLabelsResponseProto { +} + +message RemoveFromClusterNodeLabelsRequestProto { + repeated string labels = 1; +} + +message RemoveFromClusterNodeLabelsResponseProto { +} + +message NodeIdToLabelsProto { + optional NodeIdProto nodeId = 1; + repeated string labels = 2; +} + +message ReplaceLabelsOnNodeRequestProto { + repeated NodeIdToLabelsProto nodeToLabels = 1; +} + +message ReplaceLabelsOnNodeResponseProto { + +} + +message GetNodeToLabelsRequestProto { +} + +message GetNodeToLabelsResponseProto { + repeated NodeIdToLabelsProto nodeToLabels = 1; +} + +message GetClusterNodeLabelsRequestProto { +} + +message GetClusterNodeLabelsResponseProto { + repeated string labels = 1; +} ////////////////////////////////////////////////////////////////// ///////////// RM Failover related records //////////////////////// diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto index 3db684b..1a35951 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto @@ -332,6 +332,8 @@ message QueueInfoProto { optional QueueStateProto state = 5; repeated QueueInfoProto childQueues = 6; repeated ApplicationReportProto applications = 7; + repeated string nodeLabels = 8; + optional string defaultNodeLabelExpression = 9; } enum QueueACLProto { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestApplicationClientProtocolOnHA.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestApplicationClientProtocolOnHA.java index bfc6656..5a54ff0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestApplicationClientProtocolOnHA.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestApplicationClientProtocolOnHA.java @@ -158,6 +158,7 @@ public void testGetContainersOnHA() throws Exception { reports); } + @SuppressWarnings("deprecation") @Test(timeout = 15000) public void testSubmitApplicationOnHA() throws Exception { ApplicationSubmissionContext appContext = diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java index d7bea7a..e40d9dc 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestYarnClient.java @@ -678,6 +678,7 @@ public ApplicationId run() throws Exception { } } + @SuppressWarnings("deprecation") private ApplicationId createApp(YarnClient rmClient, boolean unmanaged) throws Exception { YarnClientApplication newApp = rmClient.createApplication(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java index 56a5b58..a552e4d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/QueueInfoPBImpl.java @@ -19,8 +19,10 @@ package org.apache.hadoop.yarn.api.records.impl.pb; import java.util.ArrayList; +import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Set; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -44,6 +46,7 @@ List applicationsList; List childQueuesList; + Set nodeLabels; public QueueInfoPBImpl() { builder = QueueInfoProto.newBuilder(); @@ -281,6 +284,10 @@ private void mergeLocalToBuilder() { if (this.applicationsList != null) { addApplicationsToProto(); } + if (this.nodeLabels != null) { + builder.clearNodeLabels(); + builder.addAllNodeLabels(this.nodeLabels); + } } private void mergeLocalToProto() { @@ -322,5 +329,43 @@ private QueueState convertFromProtoFormat(QueueStateProto q) { private QueueStateProto convertToProtoFormat(QueueState queueState) { return ProtoUtils.convertToProtoFormat(queueState); } + + @Override + public void setNodeLabels(Set nodeLabels) { + maybeInitBuilder(); + builder.clearNodeLabels(); + this.nodeLabels = nodeLabels; + } + + private void initNodeLabels() { + if (this.nodeLabels != null) { + return; + } + QueueInfoProtoOrBuilder p = viaProto ? proto : builder; + this.nodeLabels = new HashSet(); + this.nodeLabels.addAll(p.getNodeLabelsList()); + } + + @Override + public Set getNodeLabels() { + initNodeLabels(); + return this.nodeLabels; + } + + @Override + public String getDefaultNodeLabelExpression() { + QueueInfoProtoOrBuilder p = viaProto ? proto : builder; + return (p.hasDefaultNodeLabelExpression()) ? 
p + .getDefaultNodeLabelExpression() : null; + } + @Override + public void setDefaultNodeLabelExpression(String defaultNodeLabelExpression) { + maybeInitBuilder(); + if (defaultNodeLabelExpression == null) { + builder.clearDefaultNodeLabelExpression(); + return; + } + builder.setDefaultNodeLabelExpression(defaultNodeLabelExpression); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsRequestPBImpl.java new file mode 100644 index 0000000..0e5c655 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsRequestPBImpl.java @@ -0,0 +1,98 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.HashSet; +import java.util.Set; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProtoOrBuilder; +import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsRequest; + +public class AddToClusterNodeLabelsRequestPBImpl extends + AddToClusterNodeLabelsRequest { + Set labels; + AddToClusterNodeLabelsRequestProto proto = AddToClusterNodeLabelsRequestProto + .getDefaultInstance(); + AddToClusterNodeLabelsRequestProto.Builder builder = null; + boolean viaProto = false; + + public AddToClusterNodeLabelsRequestPBImpl() { + this.builder = AddToClusterNodeLabelsRequestProto.newBuilder(); + } + + public AddToClusterNodeLabelsRequestPBImpl( + AddToClusterNodeLabelsRequestProto proto) { + this.proto = proto; + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = AddToClusterNodeLabelsRequestProto.newBuilder(proto); + } + viaProto = false; + } + + private void mergeLocalToBuilder() { + if (this.labels != null && !this.labels.isEmpty()) { + builder.addAllLabels(this.labels); + } + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + public AddToClusterNodeLabelsRequestProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + private void initLabels() { + if (this.labels != null) { + return; + } + AddToClusterNodeLabelsRequestProtoOrBuilder p = viaProto ? 
proto : builder; + this.labels = new HashSet(); + this.labels.addAll(p.getLabelsList()); + } + + @Override + public void setLabels(Set labels) { + maybeInitBuilder(); + if (labels == null || labels.isEmpty()) { + builder.clearLabels(); + } + this.labels = labels; + } + + @Override + public Set getLabels() { + initLabels(); + return this.labels; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsResponsePBImpl.java new file mode 100644 index 0000000..3d1f71c --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/AddToClusterNodeLabelsResponsePBImpl.java @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.AddToClusterNodeLabelsResponse; + +import com.google.protobuf.TextFormat; + +public class AddToClusterNodeLabelsResponsePBImpl extends + AddToClusterNodeLabelsResponse { + + AddToClusterNodeLabelsResponseProto proto = AddToClusterNodeLabelsResponseProto + .getDefaultInstance(); + AddToClusterNodeLabelsResponseProto.Builder builder = null; + boolean viaProto = false; + + public AddToClusterNodeLabelsResponsePBImpl() { + builder = AddToClusterNodeLabelsResponseProto.newBuilder(); + } + + public AddToClusterNodeLabelsResponsePBImpl( + AddToClusterNodeLabelsResponseProto proto) { + this.proto = proto; + viaProto = true; + } + + public AddToClusterNodeLabelsResponseProto getProto() { + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetClusterNodeLabelsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetClusterNodeLabelsRequestPBImpl.java new file mode 100644 index 0000000..9cb4206 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetClusterNodeLabelsRequestPBImpl.java @@ -0,0 +1,68 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsRequestProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.GetClusterNodeLabelsRequest; + +import com.google.protobuf.TextFormat; + +public class GetClusterNodeLabelsRequestPBImpl extends + GetClusterNodeLabelsRequest { + + GetClusterNodeLabelsRequestProto proto = GetClusterNodeLabelsRequestProto + .getDefaultInstance(); + GetClusterNodeLabelsRequestProto.Builder builder = null; + boolean viaProto = false; + + public GetClusterNodeLabelsRequestPBImpl() { + builder = GetClusterNodeLabelsRequestProto.newBuilder(); + } + + public GetClusterNodeLabelsRequestPBImpl(GetClusterNodeLabelsRequestProto proto) { + this.proto = proto; + viaProto = true; + } + + public GetClusterNodeLabelsRequestProto getProto() { + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetClusterNodeLabelsResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetClusterNodeLabelsResponsePBImpl.java new file mode 100644 index 0000000..cd56f65 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetClusterNodeLabelsResponsePBImpl.java @@ -0,0 +1,98 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.HashSet; +import java.util.Set; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsResponseProtoOrBuilder; +import org.apache.hadoop.yarn.server.api.protocolrecords.GetClusterNodeLabelsResponse; + +public class GetClusterNodeLabelsResponsePBImpl extends + GetClusterNodeLabelsResponse { + Set labels; + GetClusterNodeLabelsResponseProto proto = GetClusterNodeLabelsResponseProto + .getDefaultInstance(); + GetClusterNodeLabelsResponseProto.Builder builder = null; + boolean viaProto = false; + + public GetClusterNodeLabelsResponsePBImpl() { + this.builder = GetClusterNodeLabelsResponseProto.newBuilder(); + } + + public GetClusterNodeLabelsResponsePBImpl( + GetClusterNodeLabelsResponseProto proto) { + this.proto = proto; + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = GetClusterNodeLabelsResponseProto.newBuilder(proto); + } + viaProto = false; + } + + private void mergeLocalToBuilder() { + if (this.labels != null && !this.labels.isEmpty()) { + builder.addAllLabels(this.labels); + } + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + public GetClusterNodeLabelsResponseProto getProto() { + mergeLocalToProto(); + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + private void initLabels() { + if (this.labels != null) { + return; + } + GetClusterNodeLabelsResponseProtoOrBuilder p = viaProto ? proto : builder; + this.labels = new HashSet(); + this.labels.addAll(p.getLabelsList()); + } + + @Override + public void setLabels(Set labels) { + maybeInitBuilder(); + if (labels == null || labels.isEmpty()) { + builder.clearLabels(); + } + this.labels = labels; + } + + @Override + public Set getLabels() { + initLabels(); + return this.labels; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetNodeToLabelsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetNodeToLabelsRequestPBImpl.java new file mode 100644 index 0000000..057f41b --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetNodeToLabelsRequestPBImpl.java @@ -0,0 +1,67 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodeToLabelsRequestProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.GetNodeToLabelsRequest; + +import com.google.protobuf.TextFormat; + +public class GetNodeToLabelsRequestPBImpl extends GetNodeToLabelsRequest { + + GetNodeToLabelsRequestProto proto = GetNodeToLabelsRequestProto + .getDefaultInstance(); + GetNodeToLabelsRequestProto.Builder builder = null; + boolean viaProto = false; + + public GetNodeToLabelsRequestPBImpl() { + builder = GetNodeToLabelsRequestProto.newBuilder(); + } + + public GetNodeToLabelsRequestPBImpl(GetNodeToLabelsRequestProto proto) { + this.proto = proto; + viaProto = true; + } + + public GetNodeToLabelsRequestProto getProto() { + proto = viaProto ? 
proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetNodeToLabelsResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetNodeToLabelsResponsePBImpl.java new file mode 100644 index 0000000..13661b4 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/GetNodeToLabelsResponsePBImpl.java @@ -0,0 +1,152 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodeToLabelsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodeToLabelsResponseProtoOrBuilder; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.GetNodeToLabelsResponse; + +import com.google.common.collect.Sets; + +public class GetNodeToLabelsResponsePBImpl extends + GetNodeToLabelsResponse { + GetNodeToLabelsResponseProto proto = GetNodeToLabelsResponseProto + .getDefaultInstance(); + GetNodeToLabelsResponseProto.Builder builder = null; + boolean viaProto = false; + + private Map> nodeToLabels; + + public GetNodeToLabelsResponsePBImpl() { + this.builder = GetNodeToLabelsResponseProto.newBuilder(); + } + + public GetNodeToLabelsResponsePBImpl(GetNodeToLabelsResponseProto proto) { + this.proto = proto; + this.viaProto = true; + } + + private void initNodeToLabels() { + if (this.nodeToLabels != null) { + return; + } + GetNodeToLabelsResponseProtoOrBuilder p = viaProto ? 
proto : builder; + List list = p.getNodeToLabelsList(); + this.nodeToLabels = new HashMap>(); + + for (NodeIdToLabelsProto c : list) { + this.nodeToLabels.put(new NodeIdPBImpl(c.getNodeId()), + Sets.newHashSet(c.getLabelsList())); + } + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = GetNodeToLabelsResponseProto.newBuilder(proto); + } + viaProto = false; + } + + private void addNodeToLabelsToProto() { + maybeInitBuilder(); + builder.clearNodeToLabels(); + if (nodeToLabels == null) { + return; + } + Iterable iterable = + new Iterable() { + @Override + public Iterator iterator() { + return new Iterator() { + + Iterator>> iter = nodeToLabels + .entrySet().iterator(); + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + @Override + public NodeIdToLabelsProto next() { + Entry> now = iter.next(); + return NodeIdToLabelsProto.newBuilder() + .setNodeId(convertToProtoFormat(now.getKey())) + .addAllLabels(now.getValue()).build(); + } + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + }; + } + }; + builder.addAllNodeToLabels(iterable); + } + + private void mergeLocalToBuilder() { + if (this.nodeToLabels != null) { + addNodeToLabelsToProto(); + } + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + public GetNodeToLabelsResponseProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public Map> getNodeToLabels() { + initNodeToLabels(); + return this.nodeToLabels; + } + + @Override + public void setNodeToLabels(Map> map) { + initNodeToLabels(); + nodeToLabels.clear(); + nodeToLabels.putAll(map); + } + + private NodeIdProto convertToProtoFormat(NodeId t) { + return ((NodeIdPBImpl)t).getProto(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeIdToLabelsPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeIdToLabelsPBImpl.java new file mode 100644 index 0000000..78caa74 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeIdToLabelsPBImpl.java @@ -0,0 +1,139 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
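A sketch of the set/getProto round trip the PB-backed response implementation above is designed for; node and label values are placeholders:

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import com.google.common.collect.Sets;

import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.GetNodeToLabelsResponsePBImpl;

public class NodeToLabelsRoundTrip {
  public static Map<NodeId, Set<String>> roundTrip() {
    Map<NodeId, Set<String>> mapping = new HashMap<NodeId, Set<String>>();
    mapping.put(NodeId.newInstance("host1.example.com", 45454),
        Sets.newHashSet("GPU"));

    GetNodeToLabelsResponsePBImpl original = new GetNodeToLabelsResponsePBImpl();
    original.setNodeToLabels(mapping);

    // getProto() merges the local map into the proto; wrapping that proto in
    // a fresh impl and reading it back exercises both conversion directions.
    GetNodeToLabelsResponsePBImpl copy =
        new GetNodeToLabelsResponsePBImpl(original.getProto());
    return copy.getNodeToLabels();
  }
}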
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProtoOrBuilder; +import org.apache.hadoop.yarn.server.api.protocolrecords.NodeIdToLabels; + +public class NodeIdToLabelsPBImpl extends NodeIdToLabels { + private List<String> labels; + private NodeId nodeId = null; + NodeIdToLabelsProto proto = NodeIdToLabelsProto + .getDefaultInstance(); + NodeIdToLabelsProto.Builder builder = null; + boolean viaProto = false; + + public NodeIdToLabelsPBImpl() { + this.builder = NodeIdToLabelsProto.newBuilder(); + } + + public NodeIdToLabelsPBImpl(NodeIdToLabelsProto proto) { + this.proto = proto; + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = NodeIdToLabelsProto.newBuilder(proto); + } + viaProto = false; + } + + private void mergeLocalToBuilder() { + if (this.labels != null && !this.labels.isEmpty()) { + builder.addAllLabels(this.labels); + } + if (this.nodeId != null + && !((NodeIdPBImpl) nodeId).getProto().equals( + builder.getNodeId())) { + builder.setNodeId(convertToProtoFormat(this.nodeId)); + } + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + public NodeIdToLabelsProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + private void initLabels() { + if (this.labels != null) { + return; + } + NodeIdToLabelsProtoOrBuilder p = viaProto ? proto : builder; + this.labels = new ArrayList<String>(); + this.labels.addAll(p.getLabelsList()); + } + + @Override + public void setLabels(List<String> labels) { + maybeInitBuilder(); + if (labels == null || labels.isEmpty()) { + builder.clearLabels(); + } + this.labels = labels; + } + + @Override + public List<String> getLabels() { + initLabels(); + return this.labels; + } + + @Override + public void setNodeId(NodeId nodeId) { + maybeInitBuilder(); + if (nodeId == null) + builder.clearNodeId(); + this.nodeId = nodeId; + } + + @Override + public NodeId getNodeId() { + NodeIdToLabelsProtoOrBuilder p = viaProto ?
proto : builder; + if (this.nodeId != null) { + return this.nodeId; + } + if (!p.hasNodeId()) { + return null; + } + this.nodeId = new NodeIdPBImpl(p.getNodeId()); + return this.nodeId; + } + + private NodeIdProto convertToProtoFormat(NodeId t) { + return ((NodeIdPBImpl)t).getProto(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsRequestPBImpl.java new file mode 100644 index 0000000..411c532 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsRequestPBImpl.java @@ -0,0 +1,99 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.HashSet; +import java.util.Set; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProtoOrBuilder; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsRequest; + +public class RemoveFromClusterNodeLabelsRequestPBImpl extends + RemoveFromClusterNodeLabelsRequest { + Set<String> labels; + RemoveFromClusterNodeLabelsRequestProto proto = + RemoveFromClusterNodeLabelsRequestProto.getDefaultInstance(); + RemoveFromClusterNodeLabelsRequestProto.Builder builder = null; + boolean viaProto = false; + + public RemoveFromClusterNodeLabelsRequestPBImpl() { + this.builder = RemoveFromClusterNodeLabelsRequestProto.newBuilder(); + } + + public RemoveFromClusterNodeLabelsRequestPBImpl( + RemoveFromClusterNodeLabelsRequestProto proto) { + this.proto = proto; + viaProto = true; + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = RemoveFromClusterNodeLabelsRequestProto.newBuilder(proto); + } + viaProto = false; + } + + private void mergeLocalToBuilder() { + if (this.labels != null && !this.labels.isEmpty()) { + builder.addAllLabels(this.labels); + } + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + public RemoveFromClusterNodeLabelsRequestProto getProto() { + mergeLocalToProto(); + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + private void initLabels() { + if (this.labels != null) { + return; + } + RemoveFromClusterNodeLabelsRequestProtoOrBuilder p = + viaProto ? proto : builder; + this.labels = new HashSet<String>(); + this.labels.addAll(p.getLabelsList()); + } + + @Override + public void setLabels(Set<String> partitions) { + maybeInitBuilder(); + if (partitions == null || partitions.isEmpty()) { + builder.clearLabels(); + } + this.labels = partitions; + } + + @Override + public Set<String> getLabels() { + initLabels(); + return this.labels; + } + +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsResponsePBImpl.java new file mode 100644 index 0000000..43cf948 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RemoveFromClusterNodeLabelsResponsePBImpl.java @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.RemoveFromClusterNodeLabelsResponse; + +import com.google.protobuf.TextFormat; + +public class RemoveFromClusterNodeLabelsResponsePBImpl extends + RemoveFromClusterNodeLabelsResponse { + + RemoveFromClusterNodeLabelsResponseProto proto = + RemoveFromClusterNodeLabelsResponseProto.getDefaultInstance(); + RemoveFromClusterNodeLabelsResponseProto.Builder builder = null; + boolean viaProto = false; + + public RemoveFromClusterNodeLabelsResponsePBImpl() { + builder = RemoveFromClusterNodeLabelsResponseProto.newBuilder(); + } + + public RemoveFromClusterNodeLabelsResponsePBImpl( + RemoveFromClusterNodeLabelsResponseProto proto) { + this.proto = proto; + viaProto = true; + } + + public RemoveFromClusterNodeLabelsResponseProto getProto() { + proto = viaProto ? proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java new file mode 100644 index 0000000..6d07cb4 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeRequestPBImpl.java @@ -0,0 +1,152 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProtoOrBuilder; +import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeRequest; + +import com.google.common.collect.Sets; + +public class ReplaceLabelsOnNodeRequestPBImpl extends + ReplaceLabelsOnNodeRequest { + ReplaceLabelsOnNodeRequestProto proto = ReplaceLabelsOnNodeRequestProto + .getDefaultInstance(); + ReplaceLabelsOnNodeRequestProto.Builder builder = null; + boolean viaProto = false; + + private Map<NodeId, Set<String>> nodeIdToLabels; + + public ReplaceLabelsOnNodeRequestPBImpl() { + this.builder = ReplaceLabelsOnNodeRequestProto.newBuilder(); + } + + public ReplaceLabelsOnNodeRequestPBImpl(ReplaceLabelsOnNodeRequestProto proto) { + this.proto = proto; + this.viaProto = true; + } + + private void initNodeToLabels() { + if (this.nodeIdToLabels != null) { + return; + } + ReplaceLabelsOnNodeRequestProtoOrBuilder p = viaProto ? proto : builder; + List<NodeIdToLabelsProto> list = p.getNodeToLabelsList(); + this.nodeIdToLabels = new HashMap<NodeId, Set<String>>(); + + for (NodeIdToLabelsProto c : list) { + this.nodeIdToLabels.put(new NodeIdPBImpl(c.getNodeId()), + Sets.newHashSet(c.getLabelsList())); + } + } + + private void maybeInitBuilder() { + if (viaProto || builder == null) { + builder = ReplaceLabelsOnNodeRequestProto.newBuilder(proto); + } + viaProto = false; + } + + private void addNodeToLabelsToProto() { + maybeInitBuilder(); + builder.clearNodeToLabels(); + if (nodeIdToLabels == null) { + return; + } + Iterable<NodeIdToLabelsProto> iterable = + new Iterable<NodeIdToLabelsProto>() { + @Override + public Iterator<NodeIdToLabelsProto> iterator() { + return new Iterator<NodeIdToLabelsProto>() { + + Iterator<Entry<NodeId, Set<String>>> iter = nodeIdToLabels + .entrySet().iterator(); + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + @Override + public NodeIdToLabelsProto next() { + Entry<NodeId, Set<String>> now = iter.next(); + return NodeIdToLabelsProto.newBuilder() + .setNodeId(convertToProtoFormat(now.getKey())) + .addAllLabels(now.getValue()).build(); + } + + @Override + public boolean hasNext() { + return iter.hasNext(); + } + }; + } + }; + builder.addAllNodeToLabels(iterable); + } + + private void mergeLocalToBuilder() { + if (this.nodeIdToLabels != null) { + addNodeToLabelsToProto(); + } + } + + private void mergeLocalToProto() { + if (viaProto) + maybeInitBuilder(); + mergeLocalToBuilder(); + proto = builder.build(); + viaProto = true; + } + + public ReplaceLabelsOnNodeRequestProto getProto() { + mergeLocalToProto(); + proto = viaProto ?
proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public Map<NodeId, Set<String>> getNodeToLabels() { + initNodeToLabels(); + return this.nodeIdToLabels; + } + + @Override + public void setNodeToLabels(Map<NodeId, Set<String>> map) { + initNodeToLabels(); + nodeIdToLabels.clear(); + nodeIdToLabels.putAll(map); + } + + private NodeIdProto convertToProtoFormat(NodeId t) { + return ((NodeIdPBImpl) t).getProto(); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeResponsePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeResponsePBImpl.java new file mode 100644 index 0000000..cd52b61 --- /dev/null +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/ReplaceLabelsOnNodeResponsePBImpl.java @@ -0,0 +1,69 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb; + +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.ReplaceLabelsOnNodeResponse; + +import com.google.protobuf.TextFormat; + +public class ReplaceLabelsOnNodeResponsePBImpl extends + ReplaceLabelsOnNodeResponse { + + ReplaceLabelsOnNodeResponseProto proto = ReplaceLabelsOnNodeResponseProto + .getDefaultInstance(); + ReplaceLabelsOnNodeResponseProto.Builder builder = null; + boolean viaProto = false; + + public ReplaceLabelsOnNodeResponsePBImpl() { + builder = ReplaceLabelsOnNodeResponseProto.newBuilder(); + } + + public ReplaceLabelsOnNodeResponsePBImpl( + ReplaceLabelsOnNodeResponseProto proto) { + this.proto = proto; + viaProto = true; + } + + public ReplaceLabelsOnNodeResponseProto getProto() { + proto = viaProto ?
proto : builder.build(); + viaProto = true; + return proto; + } + + @Override + public int hashCode() { + return getProto().hashCode(); + } + + @Override + public boolean equals(Object other) { + if (other == null) + return false; + if (other.getClass().isAssignableFrom(this.getClass())) { + return this.getProto().equals(this.getClass().cast(other).getProto()); + } + return false; + } + + @Override + public String toString() { + return TextFormat.shortDebugString(getProto()); + } +} diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java index e9ca76f..7ea614a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestPBImplRecords.java @@ -36,15 +36,271 @@ import org.apache.commons.lang.math.LongRange; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.security.proto.SecurityProtos.*; -import org.apache.hadoop.yarn.api.protocolrecords.*; -import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.*; -import org.apache.hadoop.yarn.api.records.*; -import org.apache.hadoop.yarn.api.records.impl.pb.*; -import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.*; -import org.apache.hadoop.yarn.proto.YarnProtos.*; -import org.apache.hadoop.yarn.proto.YarnServiceProtos.*; -import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.*; +import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto; +import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto; +import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto; +import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto; +import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto; +import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto; +import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; +import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.AllocateRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.AllocateResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportResponsePBImpl; +import 
org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerStatusesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerStatusesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.MoveApplicationAcrossQueuesRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.MoveApplicationAcrossQueuesResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationDeleteRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationDeleteResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationSubmissionRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationSubmissionResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationUpdateRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationUpdateResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainersRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainersResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainersRequestPBImpl; +import 
org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainersResponsePBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl; +import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; +import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport; +import org.apache.hadoop.yarn.api.records.ApplicationId; +import org.apache.hadoop.yarn.api.records.ApplicationReport; +import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; +import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; +import org.apache.hadoop.yarn.api.records.Container; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; +import org.apache.hadoop.yarn.api.records.ContainerReport; +import org.apache.hadoop.yarn.api.records.ContainerResourceDecrease; +import org.apache.hadoop.yarn.api.records.ContainerResourceIncrease; +import org.apache.hadoop.yarn.api.records.ContainerResourceIncreaseRequest; +import org.apache.hadoop.yarn.api.records.ContainerStatus; +import org.apache.hadoop.yarn.api.records.LocalResource; +import org.apache.hadoop.yarn.api.records.LogAggregationContext; +import org.apache.hadoop.yarn.api.records.NMToken; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.api.records.NodeReport; +import org.apache.hadoop.yarn.api.records.PreemptionContainer; +import org.apache.hadoop.yarn.api.records.PreemptionContract; +import org.apache.hadoop.yarn.api.records.PreemptionMessage; +import org.apache.hadoop.yarn.api.records.PreemptionResourceRequest; +import org.apache.hadoop.yarn.api.records.Priority; +import org.apache.hadoop.yarn.api.records.QueueInfo; +import org.apache.hadoop.yarn.api.records.QueueState; +import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; +import org.apache.hadoop.yarn.api.records.ReservationDefinition; +import org.apache.hadoop.yarn.api.records.ReservationId; +import org.apache.hadoop.yarn.api.records.ReservationRequest; +import org.apache.hadoop.yarn.api.records.ReservationRequests; +import org.apache.hadoop.yarn.api.records.Resource; +import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest; +import org.apache.hadoop.yarn.api.records.ResourceOption; +import org.apache.hadoop.yarn.api.records.ResourceRequest; +import org.apache.hadoop.yarn.api.records.SerializedException; +import org.apache.hadoop.yarn.api.records.StrictPreemptionContract; +import org.apache.hadoop.yarn.api.records.Token; +import org.apache.hadoop.yarn.api.records.URL; +import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationResourceUsageReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationSubmissionContextPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerLaunchContextPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerReportPBImpl; +import 
org.apache.hadoop.yarn.api.records.impl.pb.ContainerResourceDecreasePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerResourceIncreasePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerResourceIncreaseRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ContainerStatusPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NMTokenPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.NodeReportPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContainerPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContractPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionMessagePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionResourceRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.QueueUserACLInfoPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourceBlacklistRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourceOptionPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.SerializedExceptionPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.StrictPreemptionContractPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.TokenPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.URLPBImpl; +import org.apache.hadoop.yarn.api.records.impl.pb.YarnClusterMetricsPBImpl; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerResourceDecreaseProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerResourceIncreaseProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerResourceIncreaseRequestProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto; +import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto; +import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto; +import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto; +import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto; +import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto; +import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto; +import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto; +import org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto; +import 
org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto; +import org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto; +import org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto; +import org.apache.hadoop.yarn.proto.YarnProtos.URLProto; +import org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetClusterNodeLabelsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodeToLabelsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetNodeToLabelsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.NodeIdToLabelsProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshAdminAclsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshNodesResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshQueuesResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshServiceAclsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshSuperUserGroupsConfigurationResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RefreshUserToGroupsMappingsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.RemoveFromClusterNodeLabelsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.ReplaceLabelsOnNodeResponseProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceRequestProto; +import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.UpdateNodeResourceResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto; +import 
org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusesResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.NMTokenProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto; +import 
org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainersResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto; +import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.GetClusterNodeLabelsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.GetClusterNodeLabelsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.GetNodeToLabelsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.GetNodeToLabelsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.NodeIdToLabelsPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshAdminAclsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshAdminAclsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshNodesResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshQueuesRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshQueuesResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshServiceAclsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshServiceAclsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.RemoveFromClusterNodeLabelsResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.ReplaceLabelsOnNodeResponsePBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceRequestPBImpl; +import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.UpdateNodeResourceResponsePBImpl; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Ignore; @@ -934,4 +1190,69 @@ public void testReservationDeleteResponsePBImpl() throws Exception { validatePBImplRecord(ReservationDeleteResponsePBImpl.class, ReservationDeleteResponseProto.class); } + + @Test + public void testNodeToLabelsPBImpl() throws Exception { + validatePBImplRecord(NodeIdToLabelsPBImpl.class, NodeIdToLabelsProto.class); + } + + @Test + public void testAddToClusterNodeLabelsRequestPBImpl() throws Exception { + 
validatePBImplRecord(AddToClusterNodeLabelsRequestPBImpl.class, + AddToClusterNodeLabelsRequestProto.class); + } + + @Test + public void testAddToClusterNodeLabelsResponsePBImpl() throws Exception { + validatePBImplRecord(AddToClusterNodeLabelsResponsePBImpl.class, + AddToClusterNodeLabelsResponseProto.class); + } + + @Test + public void testRemoveFromClusterNodeLabelsRequestPBImpl() throws Exception { + validatePBImplRecord(RemoveFromClusterNodeLabelsRequestPBImpl.class, + RemoveFromClusterNodeLabelsRequestProto.class); + } + + @Test + public void testRemoveFromClusterNodeLabelsResponsePBImpl() throws Exception { + validatePBImplRecord(RemoveFromClusterNodeLabelsResponsePBImpl.class, + RemoveFromClusterNodeLabelsResponseProto.class); + } + + @Test + public void testGetClusterNodeLabelsRequestPBImpl() throws Exception { + validatePBImplRecord(GetClusterNodeLabelsRequestPBImpl.class, + GetClusterNodeLabelsRequestProto.class); + } + + @Test + public void testGetClusterNodeLabelsResponsePBImpl() throws Exception { + validatePBImplRecord(GetClusterNodeLabelsResponsePBImpl.class, + GetClusterNodeLabelsResponseProto.class); + } + + @Test + public void testReplaceLabelsOnNodeRequestPBImpl() throws Exception { + validatePBImplRecord(ReplaceLabelsOnNodeRequestPBImpl.class, + ReplaceLabelsOnNodeRequestProto.class); + } + + @Test + public void testReplaceLabelsOnNodeResponsePBImpl() throws Exception { + validatePBImplRecord(ReplaceLabelsOnNodeResponsePBImpl.class, + ReplaceLabelsOnNodeResponseProto.class); + } + + @Test + public void testGetNodeToLabelsRequestPBImpl() throws Exception { + validatePBImplRecord(GetNodeToLabelsRequestPBImpl.class, + GetNodeToLabelsRequestProto.class); + } + + @Test + public void testGetNodeToLabelsResponsePBImpl() throws Exception { + validatePBImplRecord(GetNodeToLabelsResponsePBImpl.class, + GetNodeToLabelsResponseProto.class); + } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java index 59db66a..f5e8ae1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/utils/BuilderUtils.java @@ -338,6 +338,7 @@ public static ApplicationReport newApplicationReport( return report; } + @SuppressWarnings("deprecation") public static ApplicationSubmissionContext newApplicationSubmissionContext( ApplicationId applicationId, String applicationName, String queue, Priority priority, ContainerLaunchContext amContainer,