diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
index dae9be0..197c7f7 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
@@ -101,6 +101,7 @@
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
@@ -510,9 +511,7 @@ public TaskAttemptImpl(TaskId taskId, int i,
oldJobId = TypeConverter.fromYarn(taskId.getJobId());
this.conf = conf;
this.clock = clock;
- attemptId = recordFactory.newRecordInstance(TaskAttemptId.class);
- attemptId.setTaskId(taskId);
- attemptId.setId(i);
+ attemptId = MRBuilderUtils.newTaskAttemptId(taskId, i);
this.taskAttemptListener = taskAttemptListener;
this.appContext = appContext;
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
index abb2397..c75dfae 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
@@ -46,7 +46,6 @@
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.util.BuilderUtils;
-import org.apache.hadoop.yarn.util.Records;
/**
* Allocates containers locally. Doesn't allocate a real container;
@@ -134,16 +133,13 @@ protected synchronized void heartbeat() throws Exception {
public void handle(ContainerAllocatorEvent event) {
if (event.getType() == ContainerAllocator.EventType.CONTAINER_REQ) {
LOG.info("Processing the event " + event.toString());
- ContainerId cID = recordFactory.newRecordInstance(ContainerId.class);
- cID.setApplicationAttemptId(applicationAttemptId);
// Assign the same container ID as the AM
- cID.setId(this.containerId.getId());
+ ContainerId cID = BuilderUtils.newContainerId(applicationAttemptId,
+ this.containerId.getId());
Container container = recordFactory.newRecordInstance(Container.class);
container.setId(cID);
- NodeId nodeId = Records.newRecord(NodeId.class);
- nodeId.setHost(this.nmHost);
- nodeId.setPort(this.nmPort);
+ NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
container.setNodeId(nodeId);
container.setContainerToken(null);
container.setNodeHttpAddress(this.nmHost + ":" + this.nmHttpPort);
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
index 3d3994f..44fdc25 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java
@@ -318,8 +318,7 @@ public void testSigTermedFunctionality() throws IOException {
Job job = Mockito.mock(Job.class);
Mockito.when(mockedContext.getJob(jobId)).thenReturn(job);
// Make TypeConverter(JobID) pass
- ApplicationId mockAppId = Mockito.mock(ApplicationId.class);
- Mockito.when(mockAppId.getClusterTimestamp()).thenReturn(1000l);
+ ApplicationId mockAppId = BuilderUtils.newApplicationId(1000, 0);
Mockito.when(jobId.getAppId()).thenReturn(mockAppId);
jheh.addToFileMap(jobId);
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
index 4a28ab0..e1f0376 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
@@ -92,8 +92,6 @@
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.factories.RecordFactory;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.service.Service;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
@@ -119,18 +117,13 @@
public static int NM_PORT = 1234;
public static int NM_HTTP_PORT = 8042;
- private static final RecordFactory recordFactory =
- RecordFactoryProvider.getRecordFactory(null);
-
//if true, tasks complete automatically as soon as they are launched
protected boolean autoComplete = false;
static ApplicationId applicationId;
static {
- applicationId = recordFactory.newRecordInstance(ApplicationId.class);
- applicationId.setClusterTimestamp(0);
- applicationId.setId(0);
+ applicationId = BuilderUtils.newApplicationId(0, 0);
}
public MRApp(int maps, int reduces, boolean autoComplete, String testName,
@@ -152,11 +145,7 @@ protected void downloadTokensAndSetupUGI(Configuration conf) {
private static ApplicationAttemptId getApplicationAttemptId(
ApplicationId applicationId, int startCount) {
- ApplicationAttemptId applicationAttemptId =
- recordFactory.newRecordInstance(ApplicationAttemptId.class);
- applicationAttemptId.setApplicationId(applicationId);
- applicationAttemptId.setAttemptId(startCount);
- return applicationAttemptId;
+ return BuilderUtils.newApplicationAttemptId(applicationId, startCount);
}
private static ContainerId getContainerId(ApplicationId applicationId,
@@ -513,9 +502,8 @@ protected ContainerAllocator createContainerAllocator(
@Override
public void handle(ContainerAllocatorEvent event) {
- ContainerId cId = recordFactory.newRecordInstance(ContainerId.class);
- cId.setApplicationAttemptId(getContext().getApplicationAttemptId());
- cId.setId(containerCount++);
+ ContainerId cId = BuilderUtils.newContainerId(
+ getContext().getApplicationAttemptId(), containerCount++);
NodeId nodeId = BuilderUtils.newNodeId(NM_HOST, NM_PORT);
Container container = BuilderUtils.newContainer(cId, nodeId,
NM_HOST + ":" + NM_HTTP_PORT, null, null, null);
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
index efb8b7a..4d27304 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
@@ -139,19 +139,14 @@ public void run() {
try {
if (concurrentRunningTasks < maxConcurrentRunningTasks) {
event = eventQueue.take();
- ContainerId cId =
- recordFactory.newRecordInstance(ContainerId.class);
- cId.setApplicationAttemptId(
- getContext().getApplicationAttemptId());
- cId.setId(containerCount++);
+ ContainerId cId = BuilderUtils.newContainerId(
+ getContext().getApplicationAttemptId(), containerCount++);
//System.out.println("Allocating " + containerCount);
Container container =
recordFactory.newRecordInstance(Container.class);
container.setId(cId);
- NodeId nodeId = recordFactory.newRecordInstance(NodeId.class);
- nodeId.setHost("dummy");
- nodeId.setPort(1234);
+ NodeId nodeId = BuilderUtils.newNodeId("dummy", 1234);
container.setNodeId(nodeId);
container.setContainerToken(null);
container.setNodeHttpAddress("localhost:8042");
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
index c1cddb8..d476446 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
@@ -147,10 +147,7 @@ public static String newJobName() {
}
public static JobId newJobID(ApplicationId appID, int i) {
- JobId id = Records.newRecord(JobId.class);
- id.setAppId(appID);
- id.setId(i);
- return id;
+ return MRBuilderUtils.newJobId(appID, i);
}
public static JobReport newJobReport(JobId id) {
@@ -226,9 +223,7 @@ public static Counters newCounters() {
}
public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
- final TaskAttemptId taid = Records.newRecord(TaskAttemptId.class);
- taid.setTaskId(tid);
- taid.setId(i);
+ final TaskAttemptId taid = MRBuilderUtils.newTaskAttemptId(tid, i);
final TaskAttemptReport report = newTaskAttemptReport(taid);
      final List<String> diags = Lists.newArrayList();
[NOTE(review): patch is corrupted/truncated at this point — the remainder of
 the MockJobs.java hunk and the entire diff header for
 .../mapreduce/v2/api/records/JobId.java (the "diff --git", "index",
 "---/+++" and "@@" lines) are missing. The fragments below belong to the
 JobId class javadoc and to the getAppId() javadoc; restore from the
 original patch before applying:]
 * start-time of the ResourceManager along with a monotonically
 * increasing counter for the jobId.
 */
 ...
 * start time of the ResourceManager and is used to generate
 * the globally unique JobId.
* @return associated ApplicationId
*/
- public abstract ApplicationId getAppId();
+ public ApplicationId getAppId() {
+ return applicationId;
+ }
/**
* Get the short integer identifier of the JobId
@@ -46,11 +67,9 @@
* of the ResourceManager.
* @return short integer identifier of the JobId
*/
- public abstract int getId();
-
- public abstract void setAppId(ApplicationId appId);
- public abstract void setId(int id);
-
+ public int getId() {
+ return proto.getId();
+ }
protected static final String JOB = "job";
protected static final char SEPARATOR = '_';
@@ -109,4 +128,8 @@ public int compareTo(JobId other) {
return appIdComp;
}
}
+
+ public JobIdProto getProto() {
+ return proto;
+ }
}
\ No newline at end of file
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java
index bae9099..0733bd6 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.mapreduce.v2.api.records;
+import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto;
+
/**
*
* TaskAttemptId represents the unique identifier for a task
@@ -31,20 +33,38 @@
* attempt number.
*
* TaskId represents the unique identifier for a Map or Reduce
@@ -33,28 +37,45 @@
* task number.
*
JobId
*/
- public abstract JobId getJobId();
+ public JobId getJobId() {
+ return jobId;
+ }
/**
* @return the type of the task - MAP/REDUCE
*/
- public abstract TaskType getTaskType();
+ public TaskType getTaskType() {
+ return convertFromProtoFormat(proto.getTaskType());
+ }
/**
* @return the task number.
*/
- public abstract int getId();
-
- public abstract void setJobId(JobId jobId);
-
- public abstract void setTaskType(TaskType taskType);
-
- public abstract void setId(int id);
+ public int getId() {
+ return proto.getId();
+ }
protected static final String TASK = "task";
@@ -123,4 +144,12 @@ public int compareTo(TaskId other) {
return jobIdComp;
}
}
+
+ public TaskIdProto getProto() {
+ return proto;
+ }
+
+ private TaskType convertFromProtoFormat(TaskTypeProto e) {
+ return MRProtoUtils.convertFromProtoFormat(e);
+ }
}
\ No newline at end of file
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java
index 325d9a8..d2b0815 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java
@@ -24,8 +24,6 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
@@ -56,12 +54,12 @@ public synchronized AMInfoProto getProto() {
private synchronized void mergeLocalToBuilder() {
if (this.appAttemptId != null
- && !((ApplicationAttemptIdPBImpl) this.appAttemptId).getProto().equals(
+ && !((ApplicationAttemptId) this.appAttemptId).getProto().equals(
builder.getApplicationAttemptId())) {
builder.setApplicationAttemptId(convertToProtoFormat(this.appAttemptId));
}
if (this.getContainerId() != null
- && !((ContainerIdPBImpl) this.containerId).getProto().equals(
+ && !((ContainerId) this.containerId).getProto().equals(
builder.getContainerId())) {
builder.setContainerId(convertToProtoFormat(this.containerId));
}
@@ -181,21 +179,21 @@ public synchronized void setNodeManagerHttpPort(int httpPort) {
builder.setNodeManagerHttpPort(httpPort);
}
- private ApplicationAttemptIdPBImpl convertFromProtoFormat(
+ private ApplicationAttemptId convertFromProtoFormat(
ApplicationAttemptIdProto p) {
- return new ApplicationAttemptIdPBImpl(p);
+ return new ApplicationAttemptId(p);
}
- private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
- return new ContainerIdPBImpl(p);
+ private ContainerId convertFromProtoFormat(ContainerIdProto p) {
+ return new ContainerId(p);
}
private
ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) {
- return ((ApplicationAttemptIdPBImpl) t).getProto();
+ return t.getProto();
}
private ContainerIdProto convertToProtoFormat(ContainerId t) {
- return ((ContainerIdPBImpl) t).getProto();
+ return t.getProto();
}
}
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java
deleted file mode 100644
index 4fc73bd..0000000
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-package org.apache.hadoop.mapreduce.v2.api.records.impl.pb;
-
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
-import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
-
-public class JobIdPBImpl extends JobId {
-
- JobIdProto proto = JobIdProto.getDefaultInstance();
- JobIdProto.Builder builder = null;
- boolean viaProto = false;
-
- private ApplicationId applicationId = null;
-
- public JobIdPBImpl() {
- builder = JobIdProto.newBuilder();
- }
-
- public JobIdPBImpl(JobIdProto proto) {
- this.proto = proto;
- viaProto = true;
- }
-
- public synchronized JobIdProto getProto() {
- mergeLocalToProto();
- proto = viaProto ? proto : builder.build();
- viaProto = true;
- return proto;
- }
-
- private synchronized void mergeLocalToBuilder() {
- if (this.applicationId != null
- && !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
- builder.getAppId())) {
- builder.setAppId(convertToProtoFormat(this.applicationId));
- }
- }
-
- private synchronized void mergeLocalToProto() {
- if (viaProto)
- maybeInitBuilder();
- mergeLocalToBuilder();
- proto = builder.build();
- viaProto = true;
- }
-
- private synchronized void maybeInitBuilder() {
- if (viaProto || builder == null) {
- builder = JobIdProto.newBuilder(proto);
- }
- viaProto = false;
- }
-
-
- @Override
- public synchronized ApplicationId getAppId() {
- JobIdProtoOrBuilder p = viaProto ? proto : builder;
- if (applicationId != null) {
- return applicationId;
- } // Else via proto
- if (!p.hasAppId()) {
- return null;
- }
- applicationId = convertFromProtoFormat(p.getAppId());
- return applicationId;
- }
-
- @Override
- public synchronized void setAppId(ApplicationId appId) {
- maybeInitBuilder();
- if (appId == null) {
- builder.clearAppId();
- }
- this.applicationId = appId;
- }
- @Override
- public synchronized int getId() {
- JobIdProtoOrBuilder p = viaProto ? proto : builder;
- return (p.getId());
- }
-
- @Override
- public synchronized void setId(int id) {
- maybeInitBuilder();
- builder.setId((id));
- }
-
- private ApplicationIdPBImpl convertFromProtoFormat(
- ApplicationIdProto p) {
- return new ApplicationIdPBImpl(p);
- }
-
- private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
- return ((ApplicationIdPBImpl) t).getProto();
- }
-}
\ No newline at end of file
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java
index 1b16c86..2413f0f 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java
@@ -317,12 +317,12 @@ private AMInfoProto convertToProtoFormat(AMInfo t) {
return ((AMInfoPBImpl)t).getProto();
}
- private JobIdPBImpl convertFromProtoFormat(JobIdProto p) {
- return new JobIdPBImpl(p);
+ private JobId convertFromProtoFormat(JobIdProto p) {
+ return new JobId(p);
}
private JobIdProto convertToProtoFormat(JobId t) {
- return ((JobIdPBImpl)t).getProto();
+ return t.getProto();
}
private JobStateProto convertToProtoFormat(JobState e) {
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java
index d6c8589..8464398 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java
@@ -156,12 +156,12 @@ public void setEventId(int eventId) {
builder.setEventId((eventId));
}
- private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) {
- return new TaskAttemptIdPBImpl(p);
+ private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) {
+ return new TaskAttemptId(p);
}
private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) {
- return ((TaskAttemptIdPBImpl)t).getProto();
+ return t.getProto();
}
private TaskAttemptCompletionEventStatusProto convertToProtoFormat(TaskAttemptCompletionEventStatus e) {
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java
deleted file mode 100644
index 9a82482..0000000
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.mapreduce.v2.api.records.impl.pb;
-
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto;
-
-public class TaskAttemptIdPBImpl extends TaskAttemptId {
- TaskAttemptIdProto proto = TaskAttemptIdProto.getDefaultInstance();
- TaskAttemptIdProto.Builder builder = null;
- boolean viaProto = false;
-
- private TaskId taskId = null;
-
-
-
- public TaskAttemptIdPBImpl() {
- builder = TaskAttemptIdProto.newBuilder();
- }
-
- public TaskAttemptIdPBImpl(TaskAttemptIdProto proto) {
- this.proto = proto;
- viaProto = true;
- }
-
- public synchronized TaskAttemptIdProto getProto() {
- mergeLocalToProto();
- proto = viaProto ? proto : builder.build();
- viaProto = true;
- return proto;
- }
-
- private synchronized void mergeLocalToBuilder() {
- if (this.taskId != null
- && !((TaskIdPBImpl) this.taskId).getProto().equals(builder.getTaskId())) {
- builder.setTaskId(convertToProtoFormat(this.taskId));
- }
- }
-
- private synchronized void mergeLocalToProto() {
- if (viaProto)
- maybeInitBuilder();
- mergeLocalToBuilder();
- proto = builder.build();
- viaProto = true;
- }
-
- private synchronized void maybeInitBuilder() {
- if (viaProto || builder == null) {
- builder = TaskAttemptIdProto.newBuilder(proto);
- }
- viaProto = false;
- }
-
-
- @Override
- public synchronized int getId() {
- TaskAttemptIdProtoOrBuilder p = viaProto ? proto : builder;
- return (p.getId());
- }
-
- @Override
- public synchronized void setId(int id) {
- maybeInitBuilder();
- builder.setId((id));
- }
- @Override
- public synchronized TaskId getTaskId() {
- TaskAttemptIdProtoOrBuilder p = viaProto ? proto : builder;
- if (this.taskId != null) {
- return this.taskId;
- }
- if (!p.hasTaskId()) {
- return null;
- }
- taskId = convertFromProtoFormat(p.getTaskId());
- return taskId;
- }
-
- @Override
- public synchronized void setTaskId(TaskId taskId) {
- maybeInitBuilder();
- if (taskId == null)
- builder.clearTaskId();
- this.taskId = taskId;
- }
-
- private TaskIdPBImpl convertFromProtoFormat(TaskIdProto p) {
- return new TaskIdPBImpl(p);
- }
-
- private TaskIdProto convertToProtoFormat(TaskId t) {
- return ((TaskIdPBImpl)t).getProto();
- }
-}
\ No newline at end of file
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java
index 999d770..8014bd2 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java
@@ -33,7 +33,6 @@
import org.apache.hadoop.mapreduce.v2.util.MRProtoUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
@@ -329,11 +328,11 @@ public void setContainerId(ContainerId containerId) {
}
private ContainerIdProto convertToProtoFormat(ContainerId t) {
- return ((ContainerIdPBImpl)t).getProto();
+ return t.getProto();
}
- private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
- return new ContainerIdPBImpl(p);
+ private ContainerId convertFromProtoFormat(ContainerIdProto p) {
+ return new ContainerId(p);
}
private CountersPBImpl convertFromProtoFormat(CountersProto p) {
@@ -344,12 +343,12 @@ private CountersProto convertToProtoFormat(Counters t) {
return ((CountersPBImpl)t).getProto();
}
- private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) {
- return new TaskAttemptIdPBImpl(p);
+ private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) {
+ return new TaskAttemptId(p);
}
private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) {
- return ((TaskAttemptIdPBImpl)t).getProto();
+ return t.getProto();
}
private TaskAttemptStateProto convertToProtoFormat(TaskAttemptState e) {
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java
deleted file mode 100644
index 8a88b87..0000000
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-package org.apache.hadoop.mapreduce.v2.api.records.impl.pb;
-
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder;
-import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto;
-import org.apache.hadoop.mapreduce.v2.util.MRProtoUtils;
-
-public class TaskIdPBImpl extends TaskId {
- TaskIdProto proto = TaskIdProto.getDefaultInstance();
- TaskIdProto.Builder builder = null;
- boolean viaProto = false;
-
- private JobId jobId = null;
-
- public TaskIdPBImpl() {
- builder = TaskIdProto.newBuilder(proto);
- }
-
- public TaskIdPBImpl(TaskIdProto proto) {
- this.proto = proto;
- viaProto = true;
- }
-
- public synchronized TaskIdProto getProto() {
- mergeLocalToProto();
- proto = viaProto ? proto : builder.build();
- viaProto = true;
- return proto;
- }
-
- private synchronized void mergeLocalToBuilder() {
- if (this.jobId != null
- && !((JobIdPBImpl) this.jobId).getProto().equals(builder.getJobId())) {
- builder.setJobId(convertToProtoFormat(this.jobId));
- }
- }
-
- private synchronized void mergeLocalToProto() {
- if (viaProto)
- maybeInitBuilder();
- mergeLocalToBuilder();
- proto = builder.build();
- viaProto = true;
- }
-
- private synchronized void maybeInitBuilder() {
- if (viaProto || builder == null) {
- builder = TaskIdProto.newBuilder(proto);
- }
- viaProto = false;
- }
-
- @Override
- public synchronized int getId() {
- TaskIdProtoOrBuilder p = viaProto ? proto : builder;
- return (p.getId());
- }
-
- @Override
- public synchronized void setId(int id) {
- maybeInitBuilder();
- builder.setId((id));
- }
-
- @Override
- public synchronized JobId getJobId() {
- TaskIdProtoOrBuilder p = viaProto ? proto : builder;
- if (this.jobId != null) {
- return this.jobId;
- }
- if (!p.hasJobId()) {
- return null;
- }
- jobId = convertFromProtoFormat(p.getJobId());
- return jobId;
- }
-
- @Override
- public synchronized void setJobId(JobId jobId) {
- maybeInitBuilder();
- if (jobId == null)
- builder.clearJobId();
- this.jobId = jobId;
- }
-
- @Override
- public synchronized TaskType getTaskType() {
- TaskIdProtoOrBuilder p = viaProto ? proto : builder;
- if (!p.hasTaskType()) {
- return null;
- }
- return convertFromProtoFormat(p.getTaskType());
- }
-
- @Override
- public synchronized void setTaskType(TaskType taskType) {
- maybeInitBuilder();
- if (taskType == null) {
- builder.clearTaskType();
- return;
- }
- builder.setTaskType(convertToProtoFormat(taskType));
- }
-
- private JobIdPBImpl convertFromProtoFormat(JobIdProto p) {
- return new JobIdPBImpl(p);
- }
-
- private JobIdProto convertToProtoFormat(JobId t) {
- return ((JobIdPBImpl)t).getProto();
- }
-
- private TaskTypeProto convertToProtoFormat(TaskType e) {
- return MRProtoUtils.convertToProtoFormat(e);
- }
-
- private TaskType convertFromProtoFormat(TaskTypeProto e) {
- return MRProtoUtils.convertFromProtoFormat(e);
- }
-}
\ No newline at end of file
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java
index f1dfe32..b98db55 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java
@@ -367,12 +367,12 @@ private CountersProto convertToProtoFormat(Counters t) {
return ((CountersPBImpl)t).getProto();
}
- private TaskIdPBImpl convertFromProtoFormat(TaskIdProto p) {
- return new TaskIdPBImpl(p);
+ private TaskId convertFromProtoFormat(TaskIdProto p) {
+ return new TaskId(p);
}
private TaskIdProto convertToProtoFormat(TaskId t) {
- return ((TaskIdPBImpl)t).getProto();
+ return ((TaskId)t).getProto();
}
private TaskStateProto convertToProtoFormat(TaskState e) {
@@ -383,14 +383,11 @@ private TaskState convertFromProtoFormat(TaskStateProto e) {
return MRProtoUtils.convertFromProtoFormat(e);
}
- private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) {
- return new TaskAttemptIdPBImpl(p);
+ private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) {
+ return new TaskAttemptId(p);
}
private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) {
- return ((TaskAttemptIdPBImpl)t).getProto();
+ return t.getProto();
}
-
-
-
}
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
index 89cfe42..be8de06 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java
@@ -27,6 +27,9 @@
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -36,10 +39,9 @@
public class MRBuilderUtils {
public static JobId newJobId(ApplicationId appId, int id) {
- JobId jobId = Records.newRecord(JobId.class);
- jobId.setAppId(appId);
+ JobIdProto.Builder jobId = JobIdProto.newBuilder();
jobId.setId(id);
- return jobId;
+ return new JobId(jobId, appId);
}
public static JobId newJobId(long clusterTs, int appIdInt, int id) {
@@ -48,19 +50,16 @@ public static JobId newJobId(long clusterTs, int appIdInt, int id) {
}
public static TaskId newTaskId(JobId jobId, int id, TaskType taskType) {
- TaskId taskId = Records.newRecord(TaskId.class);
- taskId.setJobId(jobId);
+ TaskIdProto.Builder taskId = TaskIdProto.newBuilder();
taskId.setId(id);
- taskId.setTaskType(taskType);
- return taskId;
+ taskId.setTaskType(MRProtoUtils.convertToProtoFormat(taskType));
+ return new TaskId(taskId, jobId);
}
public static TaskAttemptId newTaskAttemptId(TaskId taskId, int attemptId) {
- TaskAttemptId taskAttemptId =
- Records.newRecord(TaskAttemptId.class);
- taskAttemptId.setTaskId(taskId);
+ TaskAttemptIdProto.Builder taskAttemptId = TaskAttemptIdProto.newBuilder();
taskAttemptId.setId(attemptId);
- return taskAttemptId;
+ return new TaskAttemptId(taskAttemptId, taskId);
}
public static JobReport newJobReport(JobId jobId, String jobName,
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index 49dec4a..aefff8c 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -30,7 +30,6 @@
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationResourceUsageReportPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl;
@@ -70,7 +69,7 @@ public void testEnums() throws Exception {
public void testFromYarn() throws Exception {
int appStartTime = 612354;
YarnApplicationState state = YarnApplicationState.RUNNING;
- ApplicationId applicationId = new ApplicationIdPBImpl();
+ ApplicationId applicationId = new ApplicationId();
ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl();
applicationReport.setApplicationId(applicationId);
applicationReport.setYarnApplicationState(state);
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
index 05497cc..dda9ce7 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
@@ -42,11 +42,9 @@
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -80,8 +78,7 @@ private static void delete(File dir) throws IOException {
@Test (timeout = 120000)
public void testJobIDtoString() {
- JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
- jid.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
+ JobId jid = MRBuilderUtils.newJobId(0, 0, 0);
assertEquals("job_0_0000", MRApps.toString(jid));
}
@@ -101,17 +98,16 @@ public void testJobIDShort() {
//TODO_get.set
@Test (timeout = 120000)
public void testTaskIDtoString() {
- TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
- tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
- tid.getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
- tid.setTaskType(TaskType.MAP);
+ TaskId tid = MRBuilderUtils.newTaskId(MRBuilderUtils.newJobId(0, 0, 0), 0,
+ TaskType.MAP);
TaskType type = tid.getTaskType();
System.err.println(type);
type = TaskType.REDUCE;
System.err.println(type);
System.err.println(tid.getTaskType());
assertEquals("task_0_0000_m_000000", MRApps.toString(tid));
- tid.setTaskType(TaskType.REDUCE);
+ tid = MRBuilderUtils.newTaskId(MRBuilderUtils.newJobId(0, 0, 0), 0,
+ TaskType.REDUCE);
assertEquals("task_0_0000_r_000000", MRApps.toString(tid));
}
@@ -141,11 +137,11 @@ public void testTaskIDBadType() {
//TODO_get.set
@Test (timeout = 120000)
public void testTaskAttemptIDtoString() {
- TaskAttemptId taid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
- taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class));
- taid.getTaskId().setTaskType(TaskType.MAP);
- taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
- taid.getTaskId().getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
+ TaskAttemptId taid = MRBuilderUtils.newTaskAttemptId(
+ MRBuilderUtils.newTaskId(
+ MRBuilderUtils.newJobId(0, 0, 0),
+ 0, TaskType.MAP),
+ 0);
assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid));
}
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java
index e172be5..ecf5b8f 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java
@@ -21,7 +21,6 @@
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.Task.CombineOutputCollector;
@@ -30,7 +29,6 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.mapreduce.task.reduce.Shuffle;
-import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.MapOutputFile;
import org.apache.hadoop.mapred.JobConf;
@@ -40,7 +38,6 @@
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
import org.apache.hadoop.mapred.ShuffleConsumerPlugin;
import org.apache.hadoop.mapred.RawKeyValueIterator;
-import org.apache.hadoop.mapred.Reducer;
/**
* A JUnit for testing availability and accessibility of shuffle related API.
@@ -181,10 +178,6 @@ public void testConsumerApi() {
* AuxiliaryService(s) which are "Shuffle-Providers" (ShuffleHandler and 3rd party plugins)
*/
public void testProviderApi() {
-
- ApplicationId mockApplicationId = mock(ApplicationId.class);
- mockApplicationId.setClusterTimestamp(new Long(10));
- mockApplicationId.setId(mock(JobID.class).getId());
LocalDirAllocator mockLocalDirAllocator = mock(LocalDirAllocator.class);
JobConf mockJobConf = mock(JobConf.class);
try {
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
index 2a8affb..84080f1 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java
@@ -44,9 +44,9 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.service.AbstractService;
import org.apache.hadoop.yarn.service.Service;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -74,10 +74,8 @@
public void init(Configuration conf) throws YarnException {
LOG.info("JobHistory Init");
this.conf = conf;
- this.appID = RecordFactoryProvider.getRecordFactory(conf)
- .newRecordInstance(ApplicationId.class);
- this.appAttemptID = RecordFactoryProvider.getRecordFactory(conf)
- .newRecordInstance(ApplicationAttemptId.class);
+ this.appID = BuilderUtils.newApplicationId(0, 0);
+ this.appAttemptID = BuilderUtils.newApplicationAttemptId(appID, 0);
moveThreadInterval = conf.getLong(
JHAdminConfig.MR_HISTORY_MOVE_INTERVAL_MS,
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
index a283954..14d5630 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
@@ -33,7 +33,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -64,7 +63,6 @@
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.YarnException;
@@ -314,7 +312,7 @@ private synchronized Object invoke(String method, Class argClass,
realProxy = null;
// HS/AMS shut down
maxRetries--;
- lastException = new IOException(e.getMessage());
+ lastException = new IOException(e);
} catch (Exception e) {
LOG.debug("Failed to contact AM/History for job " + jobId
@@ -323,7 +321,7 @@ private synchronized Object invoke(String method, Class argClass,
realProxy = null;
// RM shutdown
maxRetries--;
- lastException = new IOException(e.getMessage());
+ lastException = new IOException(e);
}
}
throw lastException;
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java
index c33ab38..7b8775f 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java
@@ -80,10 +80,9 @@
private ApplicationReport getUnknownApplicationReport() {
- ApplicationId unknownAppId = recordFactory
- .newRecordInstance(ApplicationId.class);
- ApplicationAttemptId unknownAttemptId = recordFactory
- .newRecordInstance(ApplicationAttemptId.class);
+ ApplicationId unknownAppId = BuilderUtils.newApplicationId(0, 0);
+ ApplicationAttemptId unknownAttemptId =
+ BuilderUtils.newApplicationAttemptId(unknownAppId, 0);
// Setting AppState to NEW and finalStatus to UNDEFINED as they are never
// used for a non running job
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
index cd325a1..0adbe41 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
@@ -31,13 +31,13 @@
import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
@@ -119,7 +119,7 @@ private ApplicationReport getApplicationReport(
ApplicationResourceUsageReport appResources = Mockito
.mock(ApplicationResourceUsageReport.class);
Mockito.when(appReport.getApplicationId()).thenReturn(
- Records.newRecord(ApplicationId.class));
+ BuilderUtils.newApplicationId(0, 0));
Mockito.when(appResources.getNeededResources()).thenReturn(
Records.newRecord(Resource.class));
Mockito.when(appResources.getReservedResources()).thenReturn(
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index 5675742..55d8bcc 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -84,6 +84,7 @@
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.log4j.Appender;
import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
@@ -135,9 +136,7 @@ public ApplicationSubmissionContext answer(InvocationOnMock invocation)
).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class),
any(String.class), any(Credentials.class));
- appId = recordFactory.newRecordInstance(ApplicationId.class);
- appId.setClusterTimestamp(System.currentTimeMillis());
- appId.setId(1);
+ appId = BuilderUtils.newApplicationId(System.currentTimeMillis(), 1);
jobId = TypeConverter.fromYarn(appId);
if (testWorkDir.exists()) {
FileContext.getLocalFSFileContext().delete(new Path(testWorkDir.toString()), true);
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
index 0beb430..5585459 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java
@@ -77,6 +77,7 @@
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.service.AbstractService;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.jboss.netty.bootstrap.ServerBootstrap;
@@ -542,9 +543,8 @@ protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch,
// $x/$user/appcache/$appId/output/$mapId
// TODO: Once Shuffle is out of NM, this can use MR APIs to convert between App and Job
JobID jobID = JobID.forName(jobId);
- ApplicationId appID = Records.newRecord(ApplicationId.class);
- appID.setClusterTimestamp(Long.parseLong(jobID.getJtIdentifier()));
- appID.setId(jobID.getId());
+ ApplicationId appID = BuilderUtils.newApplicationId(
+ Long.valueOf(jobID.getJtIdentifier()), jobID.getId());
final String base =
ContainerLocalizer.USERCACHE + "/" + user + "/"
+ ContainerLocalizer.APPCACHE + "/"
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java
index 68caaa0..8febe8e 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java
@@ -25,12 +25,9 @@
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
-import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
@@ -309,12 +306,12 @@ public void clearReleases() {
this.release.clear();
}
- private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) {
- return new ApplicationAttemptIdPBImpl(p);
+ private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) {
+ return new ApplicationAttemptId(p);
}
private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) {
- return ((ApplicationAttemptIdPBImpl)t).getProto();
+ return t.getProto();
}
private ResourceRequestPBImpl convertFromProtoFormat(ResourceRequestProto p) {
@@ -325,11 +322,11 @@ private ResourceRequestProto convertToProtoFormat(ResourceRequest t) {
return ((ResourceRequestPBImpl)t).getProto();
}
- private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
- return new ContainerIdPBImpl(p);
+ private ContainerId convertFromProtoFormat(ContainerIdProto p) {
+ return new ContainerId(p);
}
private ContainerIdProto convertToProtoFormat(ContainerId t) {
- return ((ContainerIdPBImpl)t).getProto();
+ return t.getProto();
}
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java
index 01b3738..a3822ff 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java
@@ -23,7 +23,6 @@
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto;
@@ -140,12 +139,12 @@ public void setFinishApplicationStatus(FinalApplicationStatus finishState) {
builder.setFinalApplicationStatus(convertToProtoFormat(finishState));
}
- private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) {
- return new ApplicationAttemptIdPBImpl(p);
+ private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) {
+ return new ApplicationAttemptId(p);
}
private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) {
- return ((ApplicationAttemptIdPBImpl)t).getProto();
+ return t.getProto();
}
private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java
index e7190f1..97dcd55 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProtoOrBuilder;
@@ -96,12 +95,12 @@ public void setApplicationId(ApplicationId applicationId) {
this.applicationId = applicationId;
}
- private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
- return new ApplicationIdPBImpl(p);
+ private ApplicationId convertFromProtoFormat(ApplicationIdProto p) {
+ return new ApplicationId(p);
}
private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
- return ((ApplicationIdPBImpl)t).getProto();
+ return t.getProto();
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java
index 33e8ca3..ad6e856 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusRequest;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProtoOrBuilder;
@@ -96,12 +95,12 @@ public void setContainerId(ContainerId containerId) {
this.containerId = containerId;
}
- private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
- return new ContainerIdPBImpl(p);
+ private ContainerId convertFromProtoFormat(ContainerIdProto p) {
+ return new ContainerId(p);
}
private ContainerIdProto convertToProtoFormat(ContainerId t) {
- return ((ContainerIdPBImpl)t).getProto();
+ return t.getProto();
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java
index d15f1b7..c900e4a 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java
@@ -23,7 +23,6 @@
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
@@ -154,12 +153,12 @@ public void setMinimumResourceCapability(Resource capability) {
this.minimumResourceCapability = capability;
}
- private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
- return new ApplicationIdPBImpl(p);
+ private ApplicationId convertFromProtoFormat(ApplicationIdProto p) {
+ return new ApplicationId(p);
}
private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
- return ((ApplicationIdPBImpl)t).getProto();
+ return t.getProto();
}
private Resource convertFromProtoFormat(ResourceProto resource) {
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java
index e2761a0..2e949c7 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProtoOrBuilder;
@@ -96,14 +95,11 @@ public void setApplicationId(ApplicationId applicationId) {
this.applicationId = applicationId;
}
- private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
- return new ApplicationIdPBImpl(p);
+ private ApplicationId convertFromProtoFormat(ApplicationIdProto p) {
+ return new ApplicationId(p);
}
private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
- return ((ApplicationIdPBImpl)t).getProto();
+ return t.getProto();
}
-
-
-
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java
index 1ab2777..bf59d7f 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder;
@@ -54,7 +53,7 @@ public RegisterApplicationMasterRequestProto getProto() {
}
private void mergeLocalToBuilder() {
- if (this.applicationAttemptId != null && !((ApplicationAttemptIdPBImpl)this.applicationAttemptId).getProto().equals(builder.getApplicationAttemptId())) {
+ if (this.applicationAttemptId != null && !((ApplicationAttemptId)this.applicationAttemptId).getProto().equals(builder.getApplicationAttemptId())) {
builder.setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId));
}
}
@@ -132,12 +131,12 @@ public void setTrackingUrl(String url) {
builder.setTrackingUrl(url);
}
- private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) {
- return new ApplicationAttemptIdPBImpl(p);
+ private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) {
+ return new ApplicationAttemptId(p);
}
private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) {
- return ((ApplicationAttemptIdPBImpl)t).getProto();
+ return t.getProto();
}
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java
index 1aa59cf..62770ef 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java
@@ -22,7 +22,6 @@
import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
-import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProtoOrBuilder;
@@ -96,12 +95,12 @@ public void setContainerId(ContainerId containerId) {
this.containerId = containerId;
}
- private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
- return new ContainerIdPBImpl(p);
+ private ContainerId convertFromProtoFormat(ContainerIdProto p) {
+ return new ContainerId(p);
}
private ContainerIdProto convertToProtoFormat(ContainerId t) {
- return ((ContainerIdPBImpl)t).getProto();
+ return t.getProto();
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java
index 24f15ce..7e069de 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java
@@ -20,10 +20,9 @@
import java.text.NumberFormat;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
/**
* ApplicationAttemptId denotes the particular attempt
@@ -35,32 +34,46 @@
*/
@Public
@Stable
-public abstract class ApplicationAttemptId implements
+public class ApplicationAttemptId implements
ComparableApplicationId of the ApplicationAttempId.
* @return ApplicationId of the ApplicationAttempId
*/
@Public
@Stable
- public abstract ApplicationId getApplicationId();
-
- @Private
- @Unstable
- public abstract void setApplicationId(ApplicationId appID);
-
+ public int getAttemptId() {
+ return proto.getAttemptId();
+ }
+
/**
* Get the attempt id of the Application.
* @return attempt id of the Application
*/
- public abstract int getAttemptId();
-
- @Private
- @Unstable
- public abstract void setAttemptId(int attemptId);
+ public ApplicationId getApplicationId() {
+ return applicationId;
+ }
static final ThreadLocalApplicationId represents the globally unique
@@ -36,9 +37,18 @@
*/
@Public
@Stable
-public abstract class ApplicationId implements ComparableApplicationId
@@ -48,23 +58,18 @@
*/
@Public
@Stable
- public abstract int getId();
-
- @Private
- @Unstable
- public abstract void setId(int id);
-
+ public int getId() {
+ return proto.getId();
+ }
+
/**
* Get the start time of the ResourceManager which is
* used to generate globally unique ApplicationId.
* @return start time of the ResourceManager
*/
- public abstract long getClusterTimestamp();
-
- @Private
- @Unstable
- public abstract void setClusterTimestamp(long clusterTimestamp);
-
+ public long getClusterTimestamp() {
+ return proto.getClusterTimestamp();
+ }
static final ThreadLocalContainerId represents a globally unique identifier
@@ -31,7 +30,27 @@
*/
@Public
@Stable
-public abstract class ContainerId implements ComparableApplicationAttemptId of the application to which
* the Container was assigned.
@@ -40,23 +59,19 @@
*/
@Public
@Stable
- public abstract ApplicationAttemptId getApplicationAttemptId();
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return applicationAttemptId;
+ }
- @Private
- @Unstable
- public abstract void setApplicationAttemptId(ApplicationAttemptId atId);
-
/**
* Get the identifier of the ContainerId.
* @return identifier of the ContainerId
*/
@Public
@Stable
- public abstract int getId();
-
- @Private
- @Unstable
- public abstract void setId(int id);
+ public int getId() {
+ return proto.getId();
+ }
// TODO: fail the app submission if attempts are more than 10 or something
@@ -135,4 +150,8 @@ public String toString() {
sb.append(containerIdFormat.get().format(getId()));
return sb.toString();
}
+
+ public ContainerIdProto getProto() {
+ return proto;
+ }
}
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java
index 886f88e..6153056 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.yarn.api.records;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
/**
* NodeId is the unique identifier for a node.