diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index dae9be0..197c7f7 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -101,6 +101,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent; import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent; import org.apache.hadoop.mapreduce.v2.util.MRApps; +import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; @@ -510,9 +511,7 @@ public TaskAttemptImpl(TaskId taskId, int i, oldJobId = TypeConverter.fromYarn(taskId.getJobId()); this.conf = conf; this.clock = clock; - attemptId = recordFactory.newRecordInstance(TaskAttemptId.class); - attemptId.setTaskId(taskId); - attemptId.setId(i); + attemptId = MRBuilderUtils.newTaskAttemptId(taskId, i); this.taskAttemptListener = taskAttemptListener; this.appContext = appContext; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java index abb2397..c75dfae 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java @@ -46,7 +46,6 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.BuilderUtils; -import org.apache.hadoop.yarn.util.Records; /** * Allocates containers locally. Doesn't allocate a real container; @@ -134,16 +133,13 @@ protected synchronized void heartbeat() throws Exception { public void handle(ContainerAllocatorEvent event) { if (event.getType() == ContainerAllocator.EventType.CONTAINER_REQ) { LOG.info("Processing the event " + event.toString()); - ContainerId cID = recordFactory.newRecordInstance(ContainerId.class); - cID.setApplicationAttemptId(applicationAttemptId); // Assign the same container ID as the AM - cID.setId(this.containerId.getId()); + ContainerId cID = BuilderUtils.newContainerId(applicationAttemptId, + this.containerId.getId()); Container container = recordFactory.newRecordInstance(Container.class); container.setId(cID); - NodeId nodeId = Records.newRecord(NodeId.class); - nodeId.setHost(this.nmHost); - nodeId.setPort(this.nmPort); + NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort); container.setNodeId(nodeId); container.setContainerToken(null); container.setNodeHttpAddress(this.nmHost + ":" + this.nmHttpPort); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java index 3d3994f..44fdc25 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java @@ -318,8 +318,7 @@ public void testSigTermedFunctionality() throws IOException { Job job = Mockito.mock(Job.class); Mockito.when(mockedContext.getJob(jobId)).thenReturn(job); // Make TypeConverter(JobID) pass - ApplicationId mockAppId = Mockito.mock(ApplicationId.class); - Mockito.when(mockAppId.getClusterTimestamp()).thenReturn(1000l); + ApplicationId mockAppId = BuilderUtils.newApplicationId(1000, 0); Mockito.when(jobId.getAppId()).thenReturn(mockAppId); jheh.addToFileMap(jobId); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index 4a28ab0..e1f0376 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -92,8 +92,6 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.service.Service; import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; @@ -119,18 +117,13 @@ public static int NM_PORT = 1234; public static int NM_HTTP_PORT = 8042; - private static final RecordFactory 
recordFactory = - RecordFactoryProvider.getRecordFactory(null); - //if true, tasks complete automatically as soon as they are launched protected boolean autoComplete = false; static ApplicationId applicationId; static { - applicationId = recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); + applicationId = BuilderUtils.newApplicationId(0, 0); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, @@ -152,11 +145,7 @@ protected void downloadTokensAndSetupUGI(Configuration conf) { private static ApplicationAttemptId getApplicationAttemptId( ApplicationId applicationId, int startCount) { - ApplicationAttemptId applicationAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - applicationAttemptId.setApplicationId(applicationId); - applicationAttemptId.setAttemptId(startCount); - return applicationAttemptId; + return BuilderUtils.newApplicationAttemptId(applicationId, startCount); } private static ContainerId getContainerId(ApplicationId applicationId, @@ -513,9 +502,8 @@ protected ContainerAllocator createContainerAllocator( @Override public void handle(ContainerAllocatorEvent event) { - ContainerId cId = recordFactory.newRecordInstance(ContainerId.class); - cId.setApplicationAttemptId(getContext().getApplicationAttemptId()); - cId.setId(containerCount++); + ContainerId cId = BuilderUtils.newContainerId( + getContext().getApplicationAttemptId(), containerCount++); NodeId nodeId = BuilderUtils.newNodeId(NM_HOST, NM_PORT); Container container = BuilderUtils.newContainer(cId, nodeId, NM_HOST + ":" + NM_HTTP_PORT, null, null, null); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java index efb8b7a..4d27304 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java @@ -139,19 +139,14 @@ public void run() { try { if (concurrentRunningTasks < maxConcurrentRunningTasks) { event = eventQueue.take(); - ContainerId cId = - recordFactory.newRecordInstance(ContainerId.class); - cId.setApplicationAttemptId( - getContext().getApplicationAttemptId()); - cId.setId(containerCount++); + ContainerId cId = BuilderUtils.newContainerId( + getContext().getApplicationAttemptId(), containerCount++); //System.out.println("Allocating " + containerCount); Container container = recordFactory.newRecordInstance(Container.class); container.setId(cId); - NodeId nodeId = recordFactory.newRecordInstance(NodeId.class); - nodeId.setHost("dummy"); - nodeId.setPort(1234); + NodeId nodeId = BuilderUtils.newNodeId("dummy", 1234); container.setNodeId(nodeId); container.setContainerToken(null); container.setNodeHttpAddress("localhost:8042"); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java index c1cddb8..d476446 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java @@ -147,10 +147,7 @@ public static String newJobName() { } public static JobId newJobID(ApplicationId appID, int i) { - JobId id = Records.newRecord(JobId.class); - id.setAppId(appID); - id.setId(i); - return id; + return 
MRBuilderUtils.newJobId(appID, i); } public static JobReport newJobReport(JobId id) { @@ -226,9 +223,7 @@ public static Counters newCounters() { } public static TaskAttempt newTaskAttempt(TaskId tid, int i) { - final TaskAttemptId taid = Records.newRecord(TaskAttemptId.class); - taid.setTaskId(tid); - taid.setId(i); + final TaskAttemptId taid = MRBuilderUtils.newTaskAttemptId(tid, i); final TaskAttemptReport report = newTaskAttemptReport(taid); final List diags = Lists.newArrayList(); diags.add(DIAGS.next()); @@ -299,13 +294,10 @@ public boolean isFinished() { @Override public ContainerId getAssignedContainerID() { - ContainerId id = Records.newRecord(ContainerId.class); - ApplicationAttemptId appAttemptId = Records - .newRecord(ApplicationAttemptId.class); - appAttemptId.setApplicationId(taid.getTaskId().getJobId().getAppId()); - appAttemptId.setAttemptId(0); - id.setApplicationAttemptId(appAttemptId); - return id; + ApplicationAttemptId appAttemptId = + BuilderUtils.newApplicationAttemptId( + taid.getTaskId().getJobId().getAppId(), 0); + return BuilderUtils.newContainerId(appAttemptId, 0); } @Override @@ -350,10 +342,7 @@ public String getNodeRackName() { } public static Task newTask(JobId jid, int i, int m, final boolean hasFailedTasks) { - final TaskId tid = Records.newRecord(TaskId.class); - tid.setJobId(jid); - tid.setId(i); - tid.setTaskType(TASK_TYPES.next()); + final TaskId tid = MRBuilderUtils.newTaskId(jid, i, TASK_TYPES.next()); final TaskReport report = newTaskReport(tid); final Map attempts = newTaskAttempts(tid, m); return new Task() { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java index 0e20d6f..9860d5a 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java @@ -60,6 +60,7 @@ import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator; import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent; import org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator; +import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.yarn.Clock; @@ -71,9 +72,8 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.service.CompositeService; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Assert; import org.junit.Test; @@ -111,8 +111,6 @@ = new AtomicInteger(0); private final AtomicLong taskTimeSavedBySpeculation = new AtomicLong(0L); - - private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); private void coreTestEstimator (TaskRuntimeEstimator testedEstimator, int expectedSpeculations) { @@ -268,10 +266,7 @@ void addAttempt(Task task) { = new ConcurrentHashMap(4); MyTaskImpl(JobId jobID, int index, TaskType type) { - taskID = recordFactory.newRecordInstance(TaskId.class); - taskID.setId(index); - taskID.setTaskType(type); - taskID.setJobId(jobID); + taskID = MRBuilderUtils.newTaskId(jobID, index, type); } void addAttempt() { @@ -520,9 +515,7 @@ public Configuration loadConfFile() { TaskAttemptState overridingState = TaskAttemptState.NEW; MyTaskAttemptImpl(TaskId 
taskID, int index, Clock clock) { - myAttemptID = recordFactory.newRecordInstance(TaskAttemptId.class); - myAttemptID.setId(index); - myAttemptID.setTaskId(taskID); + myAttemptID = MRBuilderUtils.newTaskAttemptId(taskID, index); } void startUp() { @@ -789,16 +782,9 @@ public MyAppMaster(Clock clock) { private final Map allJobs; MyAppContext(int numberMaps, int numberReduces) { - myApplicationID = recordFactory.newRecordInstance(ApplicationId.class); - myApplicationID.setClusterTimestamp(clock.getTime()); - myApplicationID.setId(1); - - myAppAttemptID = recordFactory - .newRecordInstance(ApplicationAttemptId.class); - myAppAttemptID.setApplicationId(myApplicationID); - myAppAttemptID.setAttemptId(0); - myJobID = recordFactory.newRecordInstance(JobId.class); - myJobID.setAppId(myApplicationID); + myApplicationID = BuilderUtils.newApplicationId(clock.getTime(), 1); + myAppAttemptID = BuilderUtils.newApplicationAttemptId(myApplicationID, 0); + myJobID = MRBuilderUtils.newJobId(myApplicationID, 0); Job myJob = new MyJobImpl(myJobID, numberMaps, numberReduces); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java index b278186..32bb621 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java @@ -34,7 +34,6 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.MRJobConfig; -import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobState; import 
org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.job.Job; @@ -49,8 +48,6 @@ import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; @@ -65,8 +62,6 @@ private FileSystem fs; private String stagingJobDir = "tmpJobDir"; private Path stagingJobPath = new Path(stagingJobDir); - private final static RecordFactory recordFactory = RecordFactoryProvider. - getRecordFactory(null); @Test public void testDeletionofStaging() throws IOException { @@ -77,15 +72,10 @@ public void testDeletionofStaging() throws IOException { String user = UserGroupInformation.getCurrentUser().getShortUserName(); Path stagingDir = MRApps.getStagingAreaDir(conf, user); when(fs.exists(stagingDir)).thenReturn(true); - ApplicationAttemptId attemptId = recordFactory.newRecordInstance( - ApplicationAttemptId.class); - attemptId.setAttemptId(0); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); - attemptId.setApplicationId(appId); - JobId jobid = recordFactory.newRecordInstance(JobId.class); - jobid.setAppId(appId); + ApplicationId appId = + BuilderUtils.newApplicationId(System.currentTimeMillis(), 0); + ApplicationAttemptId attemptId = + BuilderUtils.newApplicationAttemptId(appId, 0); MRAppMaster appMaster = new TestMRApp(attemptId); appMaster.init(conf); appMaster.shutDownJob(); @@ -101,15 +91,10 @@ public void testDeletionofStagingOnKill() throws IOException { String user = UserGroupInformation.getCurrentUser().getShortUserName(); Path stagingDir = MRApps.getStagingAreaDir(conf, user); 
when(fs.exists(stagingDir)).thenReturn(true); - ApplicationAttemptId attemptId = recordFactory.newRecordInstance( - ApplicationAttemptId.class); - attemptId.setAttemptId(0); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); - attemptId.setApplicationId(appId); - JobId jobid = recordFactory.newRecordInstance(JobId.class); - jobid.setAppId(appId); + ApplicationId appId = + BuilderUtils.newApplicationId(System.currentTimeMillis(), 0); + ApplicationAttemptId attemptId = + BuilderUtils.newApplicationAttemptId(appId, 0); ContainerAllocator mockAlloc = mock(ContainerAllocator.class); MRAppMaster appMaster = new TestMRApp(attemptId, mockAlloc, 4); appMaster.init(conf); @@ -129,15 +114,10 @@ public void testDeletionofStagingOnKillLastTry() throws IOException { String user = UserGroupInformation.getCurrentUser().getShortUserName(); Path stagingDir = MRApps.getStagingAreaDir(conf, user); when(fs.exists(stagingDir)).thenReturn(true); - ApplicationAttemptId attemptId = recordFactory.newRecordInstance( - ApplicationAttemptId.class); - attemptId.setAttemptId(1); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); - attemptId.setApplicationId(appId); - JobId jobid = recordFactory.newRecordInstance(JobId.class); - jobid.setAppId(appId); + ApplicationId appId = + BuilderUtils.newApplicationId(System.currentTimeMillis(), 0); + ApplicationAttemptId attemptId = + BuilderUtils.newApplicationAttemptId(appId, 1); ContainerAllocator mockAlloc = mock(ContainerAllocator.class); MRAppMaster appMaster = new TestMRApp(attemptId, mockAlloc, MRJobConfig.DEFAULT_MR_AM_MAX_ATTEMPTS); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java index 8cfbe03..da93ab6 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java @@ -69,12 +69,11 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; -import org.apache.hadoop.yarn.util.Records; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -421,8 +420,8 @@ public void testReportDiagnostics() throws Exception { final JobDiagnosticsUpdateEvent diagUpdateEvent = new JobDiagnosticsUpdateEvent(jobId, diagMsg); MRAppMetrics mrAppMetrics = MRAppMetrics.create(); - JobImpl job = new JobImpl(jobId, Records - .newRecord(ApplicationAttemptId.class), new Configuration(), + JobImpl job = new JobImpl(jobId, BuilderUtils.newApplicationAttemptId( + BuilderUtils.newApplicationId(0, 0), 0), new Configuration(), mock(EventHandler.class), null, mock(JobTokenSecretManager.class), null, new SystemClock(), null, @@ -432,8 +431,8 @@ public void testReportDiagnostics() throws Exception { Assert.assertNotNull(diagnostics); Assert.assertTrue(diagnostics.contains(diagMsg)); - job = new JobImpl(jobId, Records - .newRecord(ApplicationAttemptId.class), new Configuration(), + job = new JobImpl(jobId, BuilderUtils.newApplicationAttemptId( + 
BuilderUtils.newApplicationId(0, 0), 0), new Configuration(), mock(EventHandler.class), null, mock(JobTokenSecretManager.class), null, new SystemClock(), null, @@ -489,8 +488,8 @@ private boolean testUberDecision(Configuration conf) { JobID jobID = JobID.forName("job_1234567890000_0001"); JobId jobId = TypeConverter.toYarn(jobID); MRAppMetrics mrAppMetrics = MRAppMetrics.create(); - JobImpl job = new JobImpl(jobId, Records - .newRecord(ApplicationAttemptId.class), conf, mock(EventHandler.class), + JobImpl job = new JobImpl(jobId, BuilderUtils.newApplicationAttemptId( + BuilderUtils.newApplicationId(0, 0), 0), conf, mock(EventHandler.class), null, new JobTokenSecretManager(), new Credentials(), null, null, mrAppMetrics, true, null, 0, null, null, null, null); InitTransition initTransition = getInitTransition(2); @@ -581,7 +580,7 @@ private static StubbedJob createStubbedJob(Configuration conf, JobID jobID = JobID.forName("job_1234567890000_0001"); JobId jobId = TypeConverter.toYarn(jobID); StubbedJob job = new StubbedJob(jobId, - Records.newRecord(ApplicationAttemptId.class), conf, + BuilderUtils.newApplicationAttemptId(BuilderUtils.newApplicationId(0, 0), 0), conf, dispatcher.getEventHandler(), true, "somebody", numSplits); dispatcher.register(JobEventType.class, job); EventHandler mockHandler = mock(EventHandler.class); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java index d3297b3..c575a72 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java @@ -56,6 +56,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent; import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics; +import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.Clock; @@ -63,7 +64,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.event.InlineDispatcher; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -228,13 +229,9 @@ public void setup() { metrics = mock(MRAppMetrics.class); dataLocations = new String[1]; - appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(1); + appId = BuilderUtils.newApplicationId(System.currentTimeMillis(), 1); - jobId = Records.newRecord(JobId.class); - jobId.setId(1); - jobId.setAppId(appId); + jobId = MRBuilderUtils.newJobId(appId, 1); appContext = mock(AppContext.class); taskSplitMetaInfo = mock(TaskSplitMetaInfo.class); @@ -257,11 +254,7 @@ public void teardown() { } private TaskId getNewTaskID() { - TaskId taskId = Records.newRecord(TaskId.class); - taskId.setId(++taskCounter); - taskId.setJobId(jobId); - taskId.setTaskType(mockTask.getType()); - return taskId; + return MRBuilderUtils.newTaskId(jobId, ++taskCounter, mockTask.getType()); } private void scheduleTaskAttempt(TaskId taskId) { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java index 4fcb475..31e48e7 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java @@ -26,8 +26,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.v2.app.AppContext; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.webapp.Controller.RequestContext; import org.junit.Before; import org.junit.Test; @@ -41,7 +40,7 @@ public void setUp() { AppContext context = mock(AppContext.class); when(context.getApplicationID()).thenReturn( - Records.newRecord(ApplicationId.class)); + BuilderUtils.newApplicationId(0, 0)); App app = new App(context); Configuration conf = new Configuration(); ctx = mock(RequestContext.class); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml index c7710f9..a615b2d 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml @@ -43,6 +43,10 @@ org.apache.hadoop + hadoop-yarn-api + + + org.apache.hadoop hadoop-mapreduce-client-core diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java index d319a7f..c33b2ce 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java @@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.util.MRApps; +import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; @@ -52,6 +53,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.util.BuilderUtils; public class TypeConverter { @@ -73,14 +75,9 @@ } public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) { - JobId jobId = recordFactory.newRecordInstance(JobId.class); - jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid - - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setId(id.getId()); - appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier())); - jobId.setAppId(appId); - return jobId; + ApplicationId appId = BuilderUtils.newApplicationId( + toClusterTimeStamp(id.getJtIdentifier()), id.getId()); + return MRBuilderUtils.newJobId(appId, id.getId()); } private static String fromClusterTimeStamp(long clusterTimeStamp) { @@ -121,11 +118,8 @@ private static long toClusterTimeStamp(String identifier) { } public static TaskId toYarn(org.apache.hadoop.mapreduce.TaskID id) 
{ - TaskId taskId = recordFactory.newRecordInstance(TaskId.class); - taskId.setId(id.getId()); - taskId.setTaskType(toYarn(id.getTaskType())); - taskId.setJobId(toYarn(id.getJobID())); - return taskId; + return MRBuilderUtils.newTaskId(toYarn(id.getJobID()), id.getId(), + toYarn(id.getTaskType())); } public static TaskAttemptState toYarn( @@ -214,18 +208,12 @@ public static TaskCompletionEvent fromYarn( public static TaskAttemptId toYarn( org.apache.hadoop.mapred.TaskAttemptID id) { - TaskAttemptId taskAttemptId = recordFactory.newRecordInstance(TaskAttemptId.class); - taskAttemptId.setTaskId(toYarn(id.getTaskID())); - taskAttemptId.setId(id.getId()); - return taskAttemptId; + return MRBuilderUtils.newTaskAttemptId(toYarn(id.getTaskID()), id.getId()); } public static TaskAttemptId toYarn( org.apache.hadoop.mapreduce.TaskAttemptID id) { - TaskAttemptId taskAttemptId = recordFactory.newRecordInstance(TaskAttemptId.class); - taskAttemptId.setTaskId(toYarn(id.getTaskID())); - taskAttemptId.setId(id.getId()); - return taskAttemptId; + return MRBuilderUtils.newTaskAttemptId(toYarn(id.getTaskID()), id.getId()); } public static org.apache.hadoop.mapreduce.Counters fromYarn( diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/FailTaskAttemptRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/FailTaskAttemptRequestPBImpl.java index ac7edb2..4fbf130 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/FailTaskAttemptRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/FailTaskAttemptRequestPBImpl.java @@ -21,7 +21,6 @@ 
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setTaskAttemptId(TaskAttemptId taskAttemptId) { this.taskAttemptId = taskAttemptId; } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetCountersRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetCountersRequestPBImpl.java index 13ec533..b2a711d 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetCountersRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetCountersRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest; import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl; import 
org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setJobId(JobId jobId) { this.jobId = jobId; } - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); + private JobId convertFromProtoFormat(JobIdProto p) { + return new JobId(p); } private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); + return ((JobId)t).getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetDiagnosticsRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetDiagnosticsRequestPBImpl.java index 19855ca..c12ad57 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetDiagnosticsRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetDiagnosticsRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setTaskAttemptId(TaskAttemptId taskAttemptId) { 
this.taskAttemptId = taskAttemptId; } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetJobReportRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetJobReportRequestPBImpl.java index b5662c2..c4cd10b 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetJobReportRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetJobReportRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest; import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setJobId(JobId jobId) { this.jobId = jobId; } - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); + private JobId convertFromProtoFormat(JobIdProto p) { + return new JobId(p); } private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); + return 
((JobId)t).getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptCompletionEventsRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptCompletionEventsRequestPBImpl.java index 71c957f..c799223 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptCompletionEventsRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptCompletionEventsRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest; import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProtoOrBuilder; @@ -118,12 +117,12 @@ public void setMaxEvents(int maxEvents) { builder.setMaxEvents((maxEvents)); } - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); + private JobId convertFromProtoFormat(JobIdProto p) { + return new JobId(p); } private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptReportRequestPBImpl.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptReportRequestPBImpl.java index 84864fb..addbfb5 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptReportRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskAttemptReportRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setTaskAttemptId(TaskAttemptId taskAttemptId) { this.taskAttemptId = taskAttemptId; } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportRequestPBImpl.java index 
288e101..145eb50 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setTaskId(TaskId taskId) { this.taskId = taskId; } - private TaskIdPBImpl convertFromProtoFormat(TaskIdProto p) { - return new TaskIdPBImpl(p); + private TaskId convertFromProtoFormat(TaskIdProto p) { + return new TaskId(p); } private TaskIdProto convertToProtoFormat(TaskId t) { - return ((TaskIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportsRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportsRequestPBImpl.java index 73af839..e19eb28 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportsRequestPBImpl.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/GetTaskReportsRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto; @@ -117,12 +116,12 @@ public void setTaskType(TaskType taskType) { builder.setTaskType(convertToProtoFormat(taskType)); } - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); + private JobId convertFromProtoFormat(JobIdProto p) { + return new JobId(p); } private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); + return t.getProto(); } private TaskTypeProto convertToProtoFormat(TaskType e) { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillJobRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillJobRequestPBImpl.java index a50d88b..f81d725 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillJobRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillJobRequestPBImpl.java @@ -21,7 +21,6 @@ import 
org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest; import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setJobId(JobId jobId) { this.jobId = jobId; } - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); + private JobId convertFromProtoFormat(JobIdProto p) { + return new JobId(p); } private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskAttemptRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskAttemptRequestPBImpl.java index 8fc0936..933cb26 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskAttemptRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskAttemptRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto; 
import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setTaskAttemptId(TaskAttemptId taskAttemptId) { this.taskAttemptId = taskAttemptId; } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskRequestPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskRequestPBImpl.java index 12594df..21180a3 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskRequestPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/impl/pb/KillTaskRequestPBImpl.java @@ -21,7 +21,6 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; -import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl; import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto; import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setTaskId(TaskId taskId) { this.taskId = taskId; } - private TaskIdPBImpl convertFromProtoFormat(TaskIdProto p) { - return new TaskIdPBImpl(p); + private TaskId 
convertFromProtoFormat(TaskIdProto p) { + return new TaskId(p); } private TaskIdProto convertToProtoFormat(TaskId t) { - return ((TaskIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java index 1f330dc..9d0c2a6 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/JobId.java @@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.v2.api.records; import java.text.NumberFormat; + +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; import org.apache.hadoop.yarn.api.records.ApplicationId; /** @@ -30,15 +32,34 @@ * start-time of the ResourceManager along with a monotonically * increasing counter for the jobId.

*/ -public abstract class JobId implements Comparable { +public class JobId implements Comparable { + private final JobIdProto proto; + private final ApplicationId applicationId; + + public JobId() { + this(JobIdProto.getDefaultInstance()); + } + public JobId(JobIdProto proto) { + this.proto = proto; + this.applicationId = new ApplicationId(proto.getAppId()); + } + + public JobId(JobIdProto.Builder builder, ApplicationId appId) { + this.applicationId = appId; + builder.setAppId(appId.getProto()); + this.proto = builder.build(); + } + /** * Get the associated ApplicationId which represents the * start time of the ResourceManager and is used to generate * the globally unique JobId. * @return associated ApplicationId */ - public abstract ApplicationId getAppId(); + public ApplicationId getAppId() { + return applicationId; + } /** * Get the short integer identifier of the JobId @@ -46,11 +67,9 @@ * of the ResourceManager. * @return short integer identifier of the JobId */ - public abstract int getId(); - - public abstract void setAppId(ApplicationId appId); - public abstract void setId(int id); - + public int getId() { + return proto.getId(); + } protected static final String JOB = "job"; protected static final char SEPARATOR = '_'; @@ -109,4 +128,8 @@ public int compareTo(JobId other) { return appIdComp; } } + + public JobIdProto getProto() { + return proto; + } } \ No newline at end of file diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java index bae9099..0733bd6 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskAttemptId.java @@ -18,6 +18,8 @@ package org.apache.hadoop.mapreduce.v2.api.records; +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; + /** *

* TaskAttemptId represents the unique identifier for a task @@ -31,20 +33,38 @@ * attempt number. *

*/ -public abstract class TaskAttemptId implements Comparable { +public class TaskAttemptId implements Comparable { + private final TaskAttemptIdProto proto; + private final TaskId taskId; + + public TaskAttemptId() { + this(TaskAttemptIdProto.getDefaultInstance()); + } + + public TaskAttemptId(TaskAttemptIdProto proto) { + this.proto = proto; + this.taskId = new TaskId(proto.getTaskId()); + } + + public TaskAttemptId(TaskAttemptIdProto.Builder builder, TaskId taskId) { + this.taskId = taskId; + builder.setTaskId(taskId.getProto()); + this.proto = builder.build(); + } + /** * @return the associated TaskId. */ - public abstract TaskId getTaskId(); + public TaskId getTaskId() { + return taskId; + } /** * @return the attempt id. */ - public abstract int getId(); - - public abstract void setTaskId(TaskId taskId); - - public abstract void setId(int id); + public int getId() { + return proto.getId(); + } protected static final String TASKATTEMPT = "attempt"; @@ -101,4 +121,8 @@ public int compareTo(TaskAttemptId other) { return taskIdComp; } } + + public TaskAttemptIdProto getProto() { + return proto; + } } \ No newline at end of file diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java index 09f5e88..88e8897 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskId.java @@ -20,6 +20,10 @@ import java.text.NumberFormat; +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto; +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto; +import 
org.apache.hadoop.mapreduce.v2.util.MRProtoUtils; + /** *

* TaskId represents the unique identifier for a Map or Reduce @@ -33,28 +37,45 @@ * task number. *

*/ -public abstract class TaskId implements Comparable { +public class TaskId implements Comparable { + private final TaskIdProto proto; + private final JobId jobId; + + public TaskId() { + this(TaskIdProto.getDefaultInstance()); + } + public TaskId(TaskIdProto proto) { + this.proto = proto; + this.jobId = new JobId(proto.getJobId()); + } + + public TaskId(TaskIdProto.Builder builder, JobId jobId) { + builder.setJobId(jobId.getProto()); + this.jobId = jobId; + this.proto = builder.build(); + } + /** * @return the associated JobId */ - public abstract JobId getJobId(); + public JobId getJobId() { + return jobId; + } /** * @return the type of the task - MAP/REDUCE */ - public abstract TaskType getTaskType(); + public TaskType getTaskType() { + return convertFromProtoFormat(proto.getTaskType()); + } /** * @return the task number. */ - public abstract int getId(); - - public abstract void setJobId(JobId jobId); - - public abstract void setTaskType(TaskType taskType); - - public abstract void setId(int id); + public int getId() { + return proto.getId(); + } protected static final String TASK = "task"; @@ -123,4 +144,12 @@ public int compareTo(TaskId other) { return jobIdComp; } } + + public TaskIdProto getProto() { + return proto; + } + + private TaskType convertFromProtoFormat(TaskTypeProto e) { + return MRProtoUtils.convertFromProtoFormat(e); + } } \ No newline at end of file diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java index 325d9a8..d2b0815 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/AMInfoPBImpl.java @@ -24,8 +24,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; @@ -56,12 +54,12 @@ public synchronized AMInfoProto getProto() { private synchronized void mergeLocalToBuilder() { if (this.appAttemptId != null - && !((ApplicationAttemptIdPBImpl) this.appAttemptId).getProto().equals( + && !((ApplicationAttemptId) this.appAttemptId).getProto().equals( builder.getApplicationAttemptId())) { builder.setApplicationAttemptId(convertToProtoFormat(this.appAttemptId)); } if (this.getContainerId() != null - && !((ContainerIdPBImpl) this.containerId).getProto().equals( + && !((ContainerId) this.containerId).getProto().equals( builder.getContainerId())) { builder.setContainerId(convertToProtoFormat(this.containerId)); } @@ -181,21 +179,21 @@ public synchronized void setNodeManagerHttpPort(int httpPort) { builder.setNodeManagerHttpPort(httpPort); } - private ApplicationAttemptIdPBImpl convertFromProtoFormat( + private ApplicationAttemptId convertFromProtoFormat( ApplicationAttemptIdProto p) { - return new ApplicationAttemptIdPBImpl(p); + return new ApplicationAttemptId(p); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl) t).getProto(); + 
return t.getProto(); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl) t).getProto(); + return t.getProto(); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java deleted file mode 100644 index 4fc73bd..0000000 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobIdPBImpl.java +++ /dev/null @@ -1,117 +0,0 @@ -/** -* Licensed to the Apache Software Foundation (ASF) under one -* or more contributor license agreements. See the NOTICE file -* distributed with this work for additional information -* regarding copyright ownership. The ASF licenses this file -* to you under the Apache License, Version 2.0 (the -* "License"); you may not use this file except in compliance -* with the License. You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ - -package org.apache.hadoop.mapreduce.v2.api.records.impl.pb; - -import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProtoOrBuilder; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; - -public class JobIdPBImpl extends JobId { - - JobIdProto proto = JobIdProto.getDefaultInstance(); - JobIdProto.Builder builder = null; - boolean viaProto = false; - - private ApplicationId applicationId = null; - - public JobIdPBImpl() { - builder = JobIdProto.newBuilder(); - } - - public JobIdPBImpl(JobIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized JobIdProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void mergeLocalToBuilder() { - if (this.applicationId != null - && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( - builder.getAppId())) { - builder.setAppId(convertToProtoFormat(this.applicationId)); - } - } - - private synchronized void mergeLocalToProto() { - if (viaProto) - maybeInitBuilder(); - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = JobIdProto.newBuilder(proto); - } - viaProto = false; - } - - - @Override - public synchronized ApplicationId getAppId() { - JobIdProtoOrBuilder p = viaProto ? 
proto : builder; - if (applicationId != null) { - return applicationId; - } // Else via proto - if (!p.hasAppId()) { - return null; - } - applicationId = convertFromProtoFormat(p.getAppId()); - return applicationId; - } - - @Override - public synchronized void setAppId(ApplicationId appId) { - maybeInitBuilder(); - if (appId == null) { - builder.clearAppId(); - } - this.applicationId = appId; - } - @Override - public synchronized int getId() { - JobIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getId()); - } - - @Override - public synchronized void setId(int id) { - maybeInitBuilder(); - builder.setId((id)); - } - - private ApplicationIdPBImpl convertFromProtoFormat( - ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); - } - - private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl) t).getProto(); - } -} \ No newline at end of file diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java index 1b16c86..2413f0f 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/JobReportPBImpl.java @@ -317,12 +317,12 @@ private AMInfoProto convertToProtoFormat(AMInfo t) { return ((AMInfoPBImpl)t).getProto(); } - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); + private JobId convertFromProtoFormat(JobIdProto p) { + return new JobId(p); } private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); + 
return t.getProto(); } private JobStateProto convertToProtoFormat(JobState e) { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java index d6c8589..8464398 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptCompletionEventPBImpl.java @@ -156,12 +156,12 @@ public void setEventId(int eventId) { builder.setEventId((eventId)); } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } private TaskAttemptCompletionEventStatusProto convertToProtoFormat(TaskAttemptCompletionEventStatus e) { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java deleted file mode 100644 index 9a82482..0000000 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptIdPBImpl.java +++ 
/dev/null @@ -1,114 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.mapreduce.v2.api.records.impl.pb; - -import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; -import org.apache.hadoop.mapreduce.v2.api.records.TaskId; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProtoOrBuilder; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto; - -public class TaskAttemptIdPBImpl extends TaskAttemptId { - TaskAttemptIdProto proto = TaskAttemptIdProto.getDefaultInstance(); - TaskAttemptIdProto.Builder builder = null; - boolean viaProto = false; - - private TaskId taskId = null; - - - - public TaskAttemptIdPBImpl() { - builder = TaskAttemptIdProto.newBuilder(); - } - - public TaskAttemptIdPBImpl(TaskAttemptIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized TaskAttemptIdProto getProto() { - mergeLocalToProto(); - proto = viaProto ? 
proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void mergeLocalToBuilder() { - if (this.taskId != null - && !((TaskIdPBImpl) this.taskId).getProto().equals(builder.getTaskId())) { - builder.setTaskId(convertToProtoFormat(this.taskId)); - } - } - - private synchronized void mergeLocalToProto() { - if (viaProto) - maybeInitBuilder(); - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = TaskAttemptIdProto.newBuilder(proto); - } - viaProto = false; - } - - - @Override - public synchronized int getId() { - TaskAttemptIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getId()); - } - - @Override - public synchronized void setId(int id) { - maybeInitBuilder(); - builder.setId((id)); - } - @Override - public synchronized TaskId getTaskId() { - TaskAttemptIdProtoOrBuilder p = viaProto ? proto : builder; - if (this.taskId != null) { - return this.taskId; - } - if (!p.hasTaskId()) { - return null; - } - taskId = convertFromProtoFormat(p.getTaskId()); - return taskId; - } - - @Override - public synchronized void setTaskId(TaskId taskId) { - maybeInitBuilder(); - if (taskId == null) - builder.clearTaskId(); - this.taskId = taskId; - } - - private TaskIdPBImpl convertFromProtoFormat(TaskIdProto p) { - return new TaskIdPBImpl(p); - } - - private TaskIdProto convertToProtoFormat(TaskId t) { - return ((TaskIdPBImpl)t).getProto(); - } -} \ No newline at end of file diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java index 999d770..8014bd2 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskAttemptReportPBImpl.java @@ -33,7 +33,6 @@ import org.apache.hadoop.mapreduce.v2.util.MRProtoUtils; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; @@ -329,11 +328,11 @@ public void setContainerId(ContainerId containerId) { } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private CountersPBImpl convertFromProtoFormat(CountersProto p) { @@ -344,12 +343,12 @@ private CountersProto convertToProtoFormat(Counters t) { return ((CountersPBImpl)t).getProto(); } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } private TaskAttemptStateProto convertToProtoFormat(TaskAttemptState e) { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java deleted file mode 100644 index 8a88b87..0000000 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskIdPBImpl.java +++ /dev/null @@ -1,142 +0,0 @@ -/** -* Licensed to the Apache Software Foundation (ASF) under one -* or more contributor license agreements. See the NOTICE file -* distributed with this work for additional information -* regarding copyright ownership. The ASF licenses this file -* to you under the Apache License, Version 2.0 (the -* "License"); you may not use this file except in compliance -* with the License. You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ - -package org.apache.hadoop.mapreduce.v2.api.records.impl.pb; - -import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.apache.hadoop.mapreduce.v2.api.records.TaskId; -import org.apache.hadoop.mapreduce.v2.api.records.TaskType; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProtoOrBuilder; -import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskTypeProto; -import org.apache.hadoop.mapreduce.v2.util.MRProtoUtils; - -public class TaskIdPBImpl extends TaskId { - TaskIdProto proto = TaskIdProto.getDefaultInstance(); - TaskIdProto.Builder builder = null; - boolean viaProto = false; - - private JobId jobId = null; - - public TaskIdPBImpl() { - builder = TaskIdProto.newBuilder(proto); - } - - public TaskIdPBImpl(TaskIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized TaskIdProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void mergeLocalToBuilder() { - if (this.jobId != null - && !((JobIdPBImpl) this.jobId).getProto().equals(builder.getJobId())) { - builder.setJobId(convertToProtoFormat(this.jobId)); - } - } - - private synchronized void mergeLocalToProto() { - if (viaProto) - maybeInitBuilder(); - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = TaskIdProto.newBuilder(proto); - } - viaProto = false; - } - - @Override - public synchronized int getId() { - TaskIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getId()); - } - - @Override - public synchronized void setId(int id) { - maybeInitBuilder(); - builder.setId((id)); - } - - @Override - public synchronized JobId getJobId() { - TaskIdProtoOrBuilder p = viaProto ? 
proto : builder; - if (this.jobId != null) { - return this.jobId; - } - if (!p.hasJobId()) { - return null; - } - jobId = convertFromProtoFormat(p.getJobId()); - return jobId; - } - - @Override - public synchronized void setJobId(JobId jobId) { - maybeInitBuilder(); - if (jobId == null) - builder.clearJobId(); - this.jobId = jobId; - } - - @Override - public synchronized TaskType getTaskType() { - TaskIdProtoOrBuilder p = viaProto ? proto : builder; - if (!p.hasTaskType()) { - return null; - } - return convertFromProtoFormat(p.getTaskType()); - } - - @Override - public synchronized void setTaskType(TaskType taskType) { - maybeInitBuilder(); - if (taskType == null) { - builder.clearTaskType(); - return; - } - builder.setTaskType(convertToProtoFormat(taskType)); - } - - private JobIdPBImpl convertFromProtoFormat(JobIdProto p) { - return new JobIdPBImpl(p); - } - - private JobIdProto convertToProtoFormat(JobId t) { - return ((JobIdPBImpl)t).getProto(); - } - - private TaskTypeProto convertToProtoFormat(TaskType e) { - return MRProtoUtils.convertToProtoFormat(e); - } - - private TaskType convertFromProtoFormat(TaskTypeProto e) { - return MRProtoUtils.convertFromProtoFormat(e); - } -} \ No newline at end of file diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java index f1dfe32..b98db55 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java @@ -367,12 +367,12 @@ private CountersProto 
convertToProtoFormat(Counters t) { return ((CountersPBImpl)t).getProto(); } - private TaskIdPBImpl convertFromProtoFormat(TaskIdProto p) { - return new TaskIdPBImpl(p); + private TaskId convertFromProtoFormat(TaskIdProto p) { + return new TaskId(p); } private TaskIdProto convertToProtoFormat(TaskId t) { - return ((TaskIdPBImpl)t).getProto(); + return ((TaskId)t).getProto(); } private TaskStateProto convertToProtoFormat(TaskState e) { @@ -383,14 +383,11 @@ private TaskState convertFromProtoFormat(TaskStateProto e) { return MRProtoUtils.convertFromProtoFormat(e); } - private TaskAttemptIdPBImpl convertFromProtoFormat(TaskAttemptIdProto p) { - return new TaskAttemptIdPBImpl(p); + private TaskAttemptId convertFromProtoFormat(TaskAttemptIdProto p) { + return new TaskAttemptId(p); } private TaskAttemptIdProto convertToProtoFormat(TaskAttemptId t) { - return ((TaskAttemptIdPBImpl)t).getProto(); + return t.getProto(); } - - - } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java index 89cfe42..be8de06 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRBuilderUtils.java @@ -27,6 +27,9 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.JobIdProto; +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskAttemptIdProto; +import org.apache.hadoop.mapreduce.v2.proto.MRProtos.TaskIdProto; import 
org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -36,10 +39,9 @@ public class MRBuilderUtils { public static JobId newJobId(ApplicationId appId, int id) { - JobId jobId = Records.newRecord(JobId.class); - jobId.setAppId(appId); + JobIdProto.Builder jobId = JobIdProto.newBuilder(); jobId.setId(id); - return jobId; + return new JobId(jobId, appId); } public static JobId newJobId(long clusterTs, int appIdInt, int id) { @@ -48,19 +50,16 @@ public static JobId newJobId(long clusterTs, int appIdInt, int id) { } public static TaskId newTaskId(JobId jobId, int id, TaskType taskType) { - TaskId taskId = Records.newRecord(TaskId.class); - taskId.setJobId(jobId); + TaskIdProto.Builder taskId = TaskIdProto.newBuilder(); taskId.setId(id); - taskId.setTaskType(taskType); - return taskId; + taskId.setTaskType(MRProtoUtils.convertToProtoFormat(taskType)); + return new TaskId(taskId, jobId); } public static TaskAttemptId newTaskAttemptId(TaskId taskId, int attemptId) { - TaskAttemptId taskAttemptId = - Records.newRecord(TaskAttemptId.class); - taskAttemptId.setTaskId(taskId); + TaskAttemptIdProto.Builder taskAttemptId = TaskAttemptIdProto.newBuilder(); taskAttemptId.setId(attemptId); - return taskAttemptId; + return new TaskAttemptId(taskAttemptId, taskId); } public static JobReport newJobReport(JobId jobId, String jobName, diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java index 49dec4a..aefff8c 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java @@ -30,7 +30,6 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.ApplicationReport; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationResourceUsageReportPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.QueueInfoPBImpl; @@ -70,7 +69,7 @@ public void testEnums() throws Exception { public void testFromYarn() throws Exception { int appStartTime = 612354; YarnApplicationState state = YarnApplicationState.RUNNING; - ApplicationId applicationId = new ApplicationIdPBImpl(); + ApplicationId applicationId = new ApplicationId(); ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl(); applicationReport.setApplicationId(applicationId); applicationReport.setYarnApplicationState(state); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java index 05497cc..dda9ce7 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java @@ -42,11 +42,9 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.ApplicationConstants; -import org.apache.hadoop.yarn.api.records.ApplicationId; 
import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -80,8 +78,7 @@ private static void delete(File dir) throws IOException { @Test (timeout = 120000) public void testJobIDtoString() { - JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class); - jid.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class)); + JobId jid = MRBuilderUtils.newJobId(0, 0, 0); assertEquals("job_0_0000", MRApps.toString(jid)); } @@ -101,17 +98,16 @@ public void testJobIDShort() { //TODO_get.set @Test (timeout = 120000) public void testTaskIDtoString() { - TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class); - tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class)); - tid.getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class)); - tid.setTaskType(TaskType.MAP); + TaskId tid = MRBuilderUtils.newTaskId(MRBuilderUtils.newJobId(0, 0, 0), 0, + TaskType.MAP); TaskType type = tid.getTaskType(); System.err.println(type); type = TaskType.REDUCE; System.err.println(type); System.err.println(tid.getTaskType()); assertEquals("task_0_0000_m_000000", MRApps.toString(tid)); - tid.setTaskType(TaskType.REDUCE); + tid = MRBuilderUtils.newTaskId(MRBuilderUtils.newJobId(0, 0, 0), 0, + TaskType.REDUCE); assertEquals("task_0_0000_r_000000", MRApps.toString(tid)); } @@ -141,11 +137,11 @@ public void testTaskIDBadType() { //TODO_get.set @Test (timeout = 120000) public void testTaskAttemptIDtoString() { - TaskAttemptId taid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class); - 
taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class)); - taid.getTaskId().setTaskType(TaskType.MAP); - taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class)); - taid.getTaskId().getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class)); + TaskAttemptId taid = MRBuilderUtils.newTaskAttemptId( + MRBuilderUtils.newTaskId( + MRBuilderUtils.newJobId(0, 0, 0), + 0, TaskType.MAP), + 0); assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid)); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java index e172be5..ecf5b8f 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java @@ -21,7 +21,6 @@ import org.junit.Test; import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.mapred.Task.CombineOutputCollector; @@ -30,7 +29,6 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.mapreduce.task.reduce.Shuffle; -import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.Counters.Counter; import org.apache.hadoop.mapred.MapOutputFile; import org.apache.hadoop.mapred.JobConf; @@ -40,7 +38,6 @@ import org.apache.hadoop.mapred.TaskUmbilicalProtocol; import org.apache.hadoop.mapred.ShuffleConsumerPlugin; 
import org.apache.hadoop.mapred.RawKeyValueIterator; -import org.apache.hadoop.mapred.Reducer; /** * A JUnit for testing availability and accessibility of shuffle related API. @@ -181,10 +178,6 @@ public void testConsumerApi() { * AuxiliaryService(s) which are "Shuffle-Providers" (ShuffleHandler and 3rd party plugins) */ public void testProviderApi() { - - ApplicationId mockApplicationId = mock(ApplicationId.class); - mockApplicationId.setClusterTimestamp(new Long(10)); - mockApplicationId.setId(mock(JobID.class).getId()); LocalDirAllocator mockLocalDirAllocator = mock(LocalDirAllocator.class); JobConf mockJobConf = mock(JobConf.class); try { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java index 2a8affb..84080f1 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java @@ -44,9 +44,9 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.Service; +import org.apache.hadoop.yarn.util.BuilderUtils; import com.google.common.util.concurrent.ThreadFactoryBuilder; @@ -74,10 +74,8 @@ public void init(Configuration conf) throws YarnException { LOG.info("JobHistory Init"); this.conf = conf; - this.appID = RecordFactoryProvider.getRecordFactory(conf) - .newRecordInstance(ApplicationId.class); - this.appAttemptID = 
RecordFactoryProvider.getRecordFactory(conf) - .newRecordInstance(ApplicationAttemptId.class); + this.appID = BuilderUtils.newApplicationId(0, 0); + this.appAttemptID = BuilderUtils.newApplicationAttemptId(appID, 0); moveThreadInterval = conf.getLong( JHAdminConfig.MR_HISTORY_MOVE_INTERVAL_MS, diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java index a283954..14d5630 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java @@ -33,7 +33,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.http.HttpConfig; -import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.MRJobConfig; @@ -64,7 +63,6 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.YarnException; @@ -314,7 +312,7 @@ private synchronized Object invoke(String method, Class argClass, realProxy = null; // HS/AMS shut down maxRetries--; - lastException = new IOException(e.getMessage()); + lastException = new IOException(e); } catch (Exception e) { LOG.debug("Failed to contact AM/History for job " + jobId @@ -323,7 +321,7 @@ private synchronized 
Object invoke(String method, Class argClass, realProxy = null; // RM shutdown maxRetries--; - lastException = new IOException(e.getMessage()); + lastException = new IOException(e); } } throw lastException; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java index c33ab38..7b8775f 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java @@ -80,10 +80,9 @@ private ApplicationReport getUnknownApplicationReport() { - ApplicationId unknownAppId = recordFactory - .newRecordInstance(ApplicationId.class); - ApplicationAttemptId unknownAttemptId = recordFactory - .newRecordInstance(ApplicationAttemptId.class); + ApplicationId unknownAppId = BuilderUtils.newApplicationId(0, 0); + ApplicationAttemptId unknownAttemptId = + BuilderUtils.newApplicationAttemptId(unknownAppId, 0); // Setting AppState to NEW and finalStatus to UNDEFINED as they are never // used for a non running job diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java index cd325a1..0adbe41 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java @@ -31,13 +31,13 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetAllApplicationsResponse; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse; -import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; import org.junit.Test; import org.mockito.ArgumentCaptor; @@ -119,7 +119,7 @@ private ApplicationReport getApplicationReport( ApplicationResourceUsageReport appResources = Mockito .mock(ApplicationResourceUsageReport.class); Mockito.when(appReport.getApplicationId()).thenReturn( - Records.newRecord(ApplicationId.class)); + BuilderUtils.newApplicationId(0, 0)); Mockito.when(appResources.getNeededResources()).thenReturn( Records.newRecord(Resource.class)); Mockito.when(appResources.getReservedResources()).thenReturn( diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java index 5675742..55d8bcc 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java @@ -84,6 +84,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.log4j.Appender; import org.apache.log4j.Layout; import org.apache.log4j.Logger; @@ -135,9 +136,7 @@ public ApplicationSubmissionContext answer(InvocationOnMock invocation) ).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class), any(String.class), any(Credentials.class)); - appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(1); + appId = BuilderUtils.newApplicationId(System.currentTimeMillis(), 1); jobId = TypeConverter.fromYarn(appId); if (testWorkDir.exists()) { FileContext.getLocalFSFileContext().delete(new Path(testWorkDir.toString()), true); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java index 0beb430..5585459 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java @@ -77,6 +77,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.service.AbstractService; +import org.apache.hadoop.yarn.util.BuilderUtils; import 
org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.Records; import org.jboss.netty.bootstrap.ServerBootstrap; @@ -542,9 +543,8 @@ protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, // $x/$user/appcache/$appId/output/$mapId // TODO: Once Shuffle is out of NM, this can use MR APIs to convert between App and Job JobID jobID = JobID.forName(jobId); - ApplicationId appID = Records.newRecord(ApplicationId.class); - appID.setClusterTimestamp(Long.parseLong(jobID.getJtIdentifier())); - appID.setId(jobID.getId()); + ApplicationId appID = BuilderUtils.newApplicationId( + Long.valueOf(jobID.getJtIdentifier()), jobID.getId()); final String base = ContainerLocalizer.USERCACHE + "/" + user + "/" + ContainerLocalizer.APPCACHE + "/" diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java index 68caaa0..8febe8e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/AllocateRequestPBImpl.java @@ -25,12 +25,9 @@ import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ProtoBase; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ResourceRequestPBImpl; import 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; @@ -309,12 +306,12 @@ public void clearReleases() { this.release.clear(); } - private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) { - return new ApplicationAttemptIdPBImpl(p); + private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) { + return new ApplicationAttemptId(p); } private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl)t).getProto(); + return t.getProto(); } private ResourceRequestPBImpl convertFromProtoFormat(ResourceRequestProto p) { @@ -325,11 +322,11 @@ private ResourceRequestProto convertToProtoFormat(ResourceRequest t) { return ((ResourceRequestPBImpl)t).getProto(); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java index 01b3738..a3822ff 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java @@ -23,7 +23,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import 
org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto; @@ -140,12 +139,12 @@ public void setFinishApplicationStatus(FinalApplicationStatus finishState) { builder.setFinalApplicationStatus(convertToProtoFormat(finishState)); } - private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) { - return new ApplicationAttemptIdPBImpl(p); + private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) { + return new ApplicationAttemptId(p); } private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl)t).getProto(); + return t.getProto(); } private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java index e7190f1..97dcd55 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationReportRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import 
org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setApplicationId(ApplicationId applicationId) { this.applicationId = applicationId; } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); + private ApplicationId convertFromProtoFormat(ApplicationIdProto p) { + return new ApplicationId(p); } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java index 33e8ca3..ad6e856 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetContainerStatusRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusRequest; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void 
setContainerId(ContainerId containerId) { this.containerId = containerId; } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java index d15f1b7..c900e4a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetNewApplicationResponsePBImpl.java @@ -23,7 +23,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ProtoBase; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; @@ -154,12 +153,12 @@ public void setMinimumResourceCapability(Resource capability) { this.minimumResourceCapability = capability; } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); + private ApplicationId convertFromProtoFormat(ApplicationIdProto p) { + return new ApplicationId(p); } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl)t).getProto(); + return t.getProto(); } 
private Resource convertFromProtoFormat(ResourceProto resource) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java index e2761a0..2e949c7 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/KillApplicationRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProtoOrBuilder; @@ -96,14 +95,11 @@ public void setApplicationId(ApplicationId applicationId) { this.applicationId = applicationId; } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); + private ApplicationId convertFromProtoFormat(ApplicationIdProto p) { + return new ApplicationId(p); } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl)t).getProto(); + return t.getProto(); } - - - } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java index 1ab2777..bf59d7f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder; @@ -54,7 +53,7 @@ public RegisterApplicationMasterRequestProto getProto() { } private void mergeLocalToBuilder() { - if (this.applicationAttemptId != null && !((ApplicationAttemptIdPBImpl)this.applicationAttemptId).getProto().equals(builder.getApplicationAttemptId())) { + if (this.applicationAttemptId != null && !((ApplicationAttemptId)this.applicationAttemptId).getProto().equals(builder.getApplicationAttemptId())) { builder.setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId)); } } @@ -132,12 +131,12 @@ public void setTrackingUrl(String url) { builder.setTrackingUrl(url); } - private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) { - return new ApplicationAttemptIdPBImpl(p); + private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) { + return new ApplicationAttemptId(p); } private ApplicationAttemptIdProto 
convertToProtoFormat(ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl)t).getProto(); + return t.getProto(); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java index 1aa59cf..62770ef 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/StopContainerRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.StopContainerRequest; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainerRequestProtoOrBuilder; @@ -96,12 +95,12 @@ public void setContainerId(ContainerId containerId) { this.containerId = containerId; } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java index 24f15ce..7e069de 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationAttemptId.java @@ -20,10 +20,9 @@ import java.text.NumberFormat; -import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; -import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; /** *

ApplicationAttemptId denotes the particular attempt @@ -35,32 +34,46 @@ */ @Public @Stable -public abstract class ApplicationAttemptId implements +public class ApplicationAttemptId implements Comparable { public static final String appAttemptIdStrPrefix = "appattempt_"; + private final ApplicationAttemptIdProto proto; + private final ApplicationId applicationId; + public ApplicationAttemptId() { + this(ApplicationAttemptIdProto.getDefaultInstance()); + } + + public ApplicationAttemptId(ApplicationId appId, + ApplicationAttemptIdProto.Builder builder) { + this.applicationId = appId; + builder.setApplicationId(appId.getProto()); + this.proto = builder.build(); + } + + public ApplicationAttemptId(ApplicationAttemptIdProto proto) { + this.proto = proto; + this.applicationId = new ApplicationId(proto.getApplicationId()); + } + /** * Get the ApplicationId of the ApplicationAttempId. * @return ApplicationId of the ApplicationAttempId */ @Public @Stable - public abstract ApplicationId getApplicationId(); - - @Private - @Unstable - public abstract void setApplicationId(ApplicationId appID); - + public ApplicationId getApplicationId() { + return applicationId; + } + /** * Get the attempt id of the Application.
* @return attempt id of the Application */ - public abstract int getAttemptId(); - - @Private - @Unstable - public abstract void setAttemptId(int attemptId); + public int getAttemptId() { + return proto.getAttemptId(); + } static final ThreadLocal attemptIdFormat = @@ -120,4 +133,8 @@ public String toString() { sb.append("_").append(attemptIdFormat.get().format(getAttemptId())); return sb.toString(); } + + public ApplicationAttemptIdProto getProto() { + return proto; + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java index 097a533..b2b8888 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java @@ -20,10 +20,11 @@ import java.text.NumberFormat; -import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; -import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; + +import com.google.common.annotations.VisibleForTesting; /** *

ApplicationId represents the globally unique @@ -36,9 +37,18 @@ */ @Public @Stable -public abstract class ApplicationId implements Comparable { - +public class ApplicationId implements Comparable { public static final String appIdStrPrefix = "application_"; + private final ApplicationIdProto proto; + + @VisibleForTesting + public ApplicationId() { + this(ApplicationIdProto.getDefaultInstance()); + } + + public ApplicationId(ApplicationIdProto proto) { + this.proto = proto; + } /** * Get the short integer identifier of the ApplicationId @@ -48,23 +58,18 @@ */ @Public @Stable - public abstract int getId(); - - @Private - @Unstable - public abstract void setId(int id); - + public int getId() { + return proto.getId(); + } + /** * Get the start time of the ResourceManager which is * used to generate globally unique ApplicationId. * @return start time of the ResourceManager */ - public abstract long getClusterTimestamp(); - - @Private - @Unstable - public abstract void setClusterTimestamp(long clusterTimestamp); - + public long getClusterTimestamp() { + return proto.getClusterTimestamp(); + } static final ThreadLocal appIdFormat = @@ -121,4 +126,8 @@ public boolean equals(Object obj) { return false; return true; } + + public ApplicationIdProto getProto() { + return proto; + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java index 68cd09e..48fa820 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java @@ -20,10 +20,9 @@ import java.text.NumberFormat; -import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import 
org.apache.hadoop.classification.InterfaceStability.Stable; -import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; /** *

ContainerId represents a globally unique identifier @@ -31,7 +30,27 @@ */ @Public @Stable -public abstract class ContainerId implements Comparable{ +public class ContainerId implements Comparable{ + private final ContainerIdProto proto; + private final ApplicationAttemptId applicationAttemptId; + + public ContainerId() { + this(ContainerIdProto.getDefaultInstance()); + } + + public ContainerId(ContainerIdProto proto) { + this.proto = proto; + this.applicationAttemptId = + new ApplicationAttemptId(proto.getAppAttemptId()); + } + + public ContainerId(ApplicationAttemptId appAttemptId, + ContainerIdProto.Builder builder) { + this.applicationAttemptId = appAttemptId; + builder.setAppAttemptId(appAttemptId.getProto()); + this.proto = builder.build(); + } + /** * Get the ApplicationAttemptId of the application to which * the Container was assigned. @@ -40,23 +59,19 @@ */ @Public @Stable - public abstract ApplicationAttemptId getApplicationAttemptId(); + public ApplicationAttemptId getApplicationAttemptId() { + return applicationAttemptId; + } - @Private - @Unstable - public abstract void setApplicationAttemptId(ApplicationAttemptId atId); - /** * Get the identifier of the ContainerId. 
* @return identifier of the ContainerId */ @Public @Stable - public abstract int getId(); - - @Private - @Unstable - public abstract void setId(int id); + public int getId() { + return proto.getId(); + } // TODO: fail the app submission if attempts are more than 10 or something @@ -135,4 +150,8 @@ public String toString() { sb.append(containerIdFormat.get().format(getId())); return sb.toString(); } + + public ContainerIdProto getProto() { + return proto; + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java index 886f88e..6153056 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeId.java @@ -18,10 +18,9 @@ package org.apache.hadoop.yarn.api.records; -import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Stable; -import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; /** *

NodeId is the unique identifier for a node.

@@ -32,7 +31,16 @@ */ @Public @Stable -public abstract class NodeId implements Comparable { +public class NodeId implements Comparable { + private final NodeIdProto proto; + + public NodeId() { + proto = NodeIdProto.getDefaultInstance(); + } + + public NodeId(NodeIdProto proto) { + this.proto = proto; + } /** * Get the hostname of the node. @@ -40,11 +48,9 @@ */ @Public @Stable - public abstract String getHost(); - - @Private - @Unstable - public abstract void setHost(String host); + public String getHost() { + return proto.getHost(); + } /** * Get the port for communicating with the node. @@ -52,11 +58,9 @@ */ @Public @Stable - public abstract int getPort(); - - @Private - @Unstable - public abstract void setPort(int port); + public int getPort() { + return proto.getPort(); + } @Override public String toString() { @@ -102,4 +106,7 @@ public int compareTo(NodeId other) { return hostCompare; } + public NodeIdProto getProto() { + return proto; + } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java deleted file mode 100644 index b118899..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptIdPBImpl.java +++ /dev/null @@ -1,114 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.api.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; - -public class ApplicationAttemptIdPBImpl extends ApplicationAttemptId { - ApplicationAttemptIdProto proto = ApplicationAttemptIdProto - .getDefaultInstance(); - ApplicationAttemptIdProto.Builder builder = null; - boolean viaProto = false; - - private ApplicationId applicationId = null; - - public ApplicationAttemptIdPBImpl() { - builder = ApplicationAttemptIdProto.newBuilder(); - } - - public ApplicationAttemptIdPBImpl(ApplicationAttemptIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized ApplicationAttemptIdProto getProto() { - mergeLocalToProto(); - proto = viaProto ? 
proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void mergeLocalToBuilder() { - if (this.applicationId != null - && !((ApplicationIdPBImpl) applicationId).getProto().equals( - builder.getApplicationId())) { - builder.setApplicationId(convertToProtoFormat(this.applicationId)); - } - } - - private synchronized void mergeLocalToProto() { - if (viaProto) - maybeInitBuilder(); - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ApplicationAttemptIdProto.newBuilder(proto); - } - viaProto = false; - } - - - @Override - public synchronized int getAttemptId() { - ApplicationAttemptIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getAttemptId()); - } - - @Override - public synchronized void setAttemptId(int attemptId) { - maybeInitBuilder(); - builder.setAttemptId((attemptId)); - } - @Override - public synchronized ApplicationId getApplicationId() { - ApplicationAttemptIdProtoOrBuilder p = viaProto ? 
proto : builder; - if (this.applicationId != null) { - return this.applicationId; - } - if (!p.hasApplicationId()) { - return null; - } - this.applicationId = convertFromProtoFormat(p.getApplicationId()); - return this.applicationId; - } - - @Override - public synchronized void setApplicationId(ApplicationId appId) { - maybeInitBuilder(); - if (appId == null) - builder.clearApplicationId(); - this.applicationId = appId; - } - - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); - } - - private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl)t).getProto(); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptStateDataPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptStateDataPBImpl.java index fa0a596e..4c0b151 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptStateDataPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationAttemptStateDataPBImpl.java @@ -56,7 +56,7 @@ public ApplicationAttemptStateDataProto getProto() { private void mergeLocalToBuilder() { if (this.attemptId != null) { - builder.setAttemptId(((ApplicationAttemptIdPBImpl)attemptId).getProto()); + builder.setAttemptId(((ApplicationAttemptId)attemptId).getProto()); } if(this.masterContainer != null) { builder.setMasterContainer(((ContainerPBImpl)masterContainer).getProto()); @@ -87,7 +87,7 @@ public ApplicationAttemptId getAttemptId() { if (!p.hasAttemptId()) { return null; } - attemptId = new ApplicationAttemptIdPBImpl(p.getAttemptId()); + attemptId = new ApplicationAttemptId(p.getAttemptId()); return attemptId; } diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java deleted file mode 100644 index ad5c778..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationIdPBImpl.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.api.records.impl.pb; - - -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder; - - - -public class ApplicationIdPBImpl extends ApplicationId { - ApplicationIdProto proto = ApplicationIdProto.getDefaultInstance(); - ApplicationIdProto.Builder builder = null; - boolean viaProto = false; - - public ApplicationIdPBImpl() { - builder = ApplicationIdProto.newBuilder(); - } - - public ApplicationIdPBImpl(ApplicationIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized ApplicationIdProto getProto() { - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ApplicationIdProto.newBuilder(proto); - } - viaProto = false; - } - - - @Override - public synchronized int getId() { - ApplicationIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getId()); - } - - @Override - public synchronized void setId(int id) { - maybeInitBuilder(); - builder.setId((id)); - } - @Override - public synchronized long getClusterTimestamp() { - ApplicationIdProtoOrBuilder p = viaProto ? 
proto : builder; - return (p.getClusterTimestamp()); - } - - @Override - public synchronized void setClusterTimestamp(long clusterTimestamp) { - maybeInitBuilder(); - builder.setClusterTimestamp((clusterTimestamp)); - } -} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java index f136a4a..995db89 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java @@ -61,7 +61,7 @@ public ApplicationMasterProto getProto() { private void mergeLocalToBuilder() { if (this.applicationId != null - && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( + && !((ApplicationId) this.applicationId).getProto().equals( builder.getApplicationId())) { builder.setApplicationId(convertToProtoFormat(this.applicationId)); } @@ -268,12 +268,12 @@ private YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto e) return ProtoUtils.convertFromProtoFormat(e); } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); + private ApplicationId convertFromProtoFormat(ApplicationIdProto p) { + return new ApplicationId(p); } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl)t).getProto(); + return t.getProto(); } private ApplicationStatusPBImpl convertFromProtoFormat(ApplicationStatusProto p) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java index 69f939c..2fadf82 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java @@ -355,12 +355,12 @@ public ApplicationReportProto getProto() { private void mergeLocalToBuilder() { if (this.applicationId != null - && !((ApplicationIdPBImpl) this.applicationId).getProto().equals( + && !((ApplicationId) this.applicationId).getProto().equals( builder.getApplicationId())) { builder.setApplicationId(convertToProtoFormat(this.applicationId)); } if (this.currentApplicationAttemptId != null - && !((ApplicationAttemptIdPBImpl) this.currentApplicationAttemptId).getProto().equals( + && !((ApplicationAttemptId) this.currentApplicationAttemptId).getProto().equals( builder.getCurrentApplicationAttemptId())) { builder.setCurrentApplicationAttemptId(convertToProtoFormat(this.currentApplicationAttemptId)); } @@ -387,11 +387,11 @@ private void maybeInitBuilder() { } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl) t).getProto(); + return t.getProto(); } private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl) t).getProto(); + return t.getProto(); } private ApplicationResourceUsageReport convertFromProtoFormat(ApplicationResourceUsageReportProto s) { @@ -402,14 +402,14 @@ private ApplicationResourceUsageReportProto convertToProtoFormat(ApplicationReso return ProtoUtils.convertToProtoFormat(s); } - private ApplicationIdPBImpl convertFromProtoFormat( + private ApplicationId convertFromProtoFormat( ApplicationIdProto applicationId) { - return new ApplicationIdPBImpl(applicationId); + return new 
ApplicationId(applicationId); } - private ApplicationAttemptIdPBImpl convertFromProtoFormat( + private ApplicationAttemptId convertFromProtoFormat( ApplicationAttemptIdProto applicationAttemptId) { - return new ApplicationAttemptIdPBImpl(applicationAttemptId); + return new ApplicationAttemptId(applicationAttemptId); } private YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto s) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationStatusPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationStatusPBImpl.java index ccfea17..6589d05 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationStatusPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationStatusPBImpl.java @@ -54,7 +54,7 @@ public ApplicationStatusProto getProto() { } private void mergeLocalToBuilder() { - if (this.applicationAttemptId != null && !((ApplicationAttemptIdPBImpl)this.applicationAttemptId).getProto().equals(builder.getApplicationAttemptId())) { + if (this.applicationAttemptId != null && !((ApplicationAttemptId)this.applicationAttemptId).getProto().equals(builder.getApplicationAttemptId())) { builder.setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId)); } } @@ -121,12 +121,12 @@ public void setProgress(float progress) { builder.setProgress((progress)); } - private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) { - return new ApplicationAttemptIdPBImpl(p); + private ApplicationAttemptId convertFromProtoFormat(ApplicationAttemptIdProto p) { + return new ApplicationAttemptId(p); } private ApplicationAttemptIdProto convertToProtoFormat(ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl)t).getProto(); + return t.getProto(); } 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java index a6a890c..ab9a45d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java @@ -252,12 +252,12 @@ private PriorityProto convertToProtoFormat(Priority t) { return ((PriorityPBImpl)t).getProto(); } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); + private ApplicationId convertFromProtoFormat(ApplicationIdProto p) { + return new ApplicationId(p); } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl)t).getProto(); + return t.getProto(); } private ContainerLaunchContextPBImpl convertFromProtoFormat( diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerIdPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerIdPBImpl.java deleted file mode 100644 index 3227ce8..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerIdPBImpl.java +++ /dev/null @@ -1,118 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.yarn.api.records.impl.pb; - -import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; -import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder; - - -public class ContainerIdPBImpl extends ContainerId { - ContainerIdProto proto = ContainerIdProto.getDefaultInstance(); - ContainerIdProto.Builder builder = null; - boolean viaProto = false; - - private ApplicationAttemptId applicationAttemptId = null; - - public ContainerIdPBImpl() { - builder = ContainerIdProto.newBuilder(); - } - - public ContainerIdPBImpl(ContainerIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized ContainerIdProto getProto() { - mergeLocalToProto(); - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void mergeLocalToBuilder() { - if (this.applicationAttemptId != null && ! 
- ((ApplicationAttemptIdPBImpl)applicationAttemptId).getProto().equals( - builder.getAppAttemptId())) { - builder.setAppAttemptId(convertToProtoFormat(this.applicationAttemptId)); - } - } - - private synchronized void mergeLocalToProto() { - if (viaProto) - maybeInitBuilder(); - mergeLocalToBuilder(); - proto = builder.build(); - viaProto = true; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = ContainerIdProto.newBuilder(proto); - } - viaProto = false; - } - - - @Override - public synchronized int getId() { - ContainerIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getId()); - } - - @Override - public synchronized void setId(int id) { - maybeInitBuilder(); - builder.setId((id)); - } - - - @Override - public synchronized ApplicationAttemptId getApplicationAttemptId() { - ContainerIdProtoOrBuilder p = viaProto ? proto : builder; - if (this.applicationAttemptId != null) { - return this.applicationAttemptId; - } - if (!p.hasAppAttemptId()) { - return null; - } - this.applicationAttemptId = convertFromProtoFormat(p.getAppAttemptId()); - return this.applicationAttemptId; - } - - @Override - public synchronized void setApplicationAttemptId(ApplicationAttemptId atId) { - maybeInitBuilder(); - if (atId == null) - builder.clearAppAttemptId(); - this.applicationAttemptId = atId; - } - - private ApplicationAttemptIdPBImpl convertFromProtoFormat( - ApplicationAttemptIdProto p) { - return new ApplicationAttemptIdPBImpl(p); - } - - private ApplicationAttemptIdProto convertToProtoFormat( - ApplicationAttemptId t) { - return ((ApplicationAttemptIdPBImpl)t).getProto(); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java index b8ba4df..fefe187 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerLaunchContextPBImpl.java @@ -77,7 +77,7 @@ public ContainerLaunchContextProto getProto() { private void mergeLocalToBuilder() { if (this.containerId != null && - !((ContainerIdPBImpl)containerId).getProto().equals( + !((ContainerId)containerId).getProto().equals( builder.getContainerId())) { builder.setContainerId(convertToProtoFormat(this.containerId)); } @@ -508,12 +508,12 @@ private ResourceProto convertToProtoFormat(Resource t) { return ((ResourcePBImpl)t).getProto(); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } private LocalResourcePBImpl convertFromProtoFormat(LocalResourceProto p) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java index 92a710a..1f7d69c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerPBImpl.java @@ -70,12 +70,12 @@ public ContainerProto getProto() { private void mergeLocalToBuilder() { if (this.containerId != null - && !((ContainerIdPBImpl) containerId).getProto().equals( + && !((ContainerId) containerId).getProto().equals( builder.getId())) { 
builder.setId(convertToProtoFormat(this.containerId)); } if (this.nodeId != null - && !((NodeIdPBImpl) nodeId).getProto().equals( + && !((NodeId) nodeId).getProto().equals( builder.getNodeId())) { builder.setNodeId(convertToProtoFormat(this.nodeId)); } @@ -289,20 +289,20 @@ private ContainerState convertFromProtoFormat(ContainerStateProto e) { return ProtoUtils.convertFromProtoFormat(e); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } - private NodeIdPBImpl convertFromProtoFormat(NodeIdProto p) { - return new NodeIdPBImpl(p); + private NodeId convertFromProtoFormat(NodeIdProto p) { + return new NodeId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } private NodeIdProto convertToProtoFormat(NodeId t) { - return ((NodeIdPBImpl)t).getProto(); + return t.getProto(); } private ResourcePBImpl convertFromProtoFormat(ResourceProto p) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java index bfde5dd..6671055 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ContainerStatusPBImpl.java @@ -148,12 +148,12 @@ private ContainerState convertFromProtoFormat(ContainerStateProto e) { return ProtoUtils.convertFromProtoFormat(e); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new 
ContainerId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl)t).getProto(); + return t.getProto(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java deleted file mode 100644 index f0a7830..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeIdPBImpl.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.api.records.impl.pb; - - -import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; -import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder; - - - -public class NodeIdPBImpl extends NodeId { - NodeIdProto proto = NodeIdProto.getDefaultInstance(); - NodeIdProto.Builder builder = null; - boolean viaProto = false; - - public NodeIdPBImpl() { - builder = NodeIdProto.newBuilder(); - } - - public NodeIdPBImpl(NodeIdProto proto) { - this.proto = proto; - viaProto = true; - } - - public synchronized NodeIdProto getProto() { - proto = viaProto ? proto : builder.build(); - viaProto = true; - return proto; - } - - private synchronized void maybeInitBuilder() { - if (viaProto || builder == null) { - builder = NodeIdProto.newBuilder(proto); - } - viaProto = false; - } - - @Override - public synchronized String getHost() { - NodeIdProtoOrBuilder p = viaProto ? proto : builder; - return (p.getHost()); - } - - @Override - public synchronized void setHost(String host) { - maybeInitBuilder(); - builder.setHost((host)); - } - - @Override - public synchronized int getPort() { - NodeIdProtoOrBuilder p = viaProto ? 
proto : builder; - return (p.getPort()); - } - - @Override - public synchronized void setPort(int port) { - maybeInitBuilder(); - builder.setPort((port)); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeReportPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeReportPBImpl.java index 115a7da..876a3f1 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeReportPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/NodeReportPBImpl.java @@ -216,7 +216,7 @@ public NodeReportProto getProto() { private void mergeLocalToBuilder() { if (this.nodeId != null - && !((NodeIdPBImpl) this.nodeId).getProto().equals( + && !((NodeId) this.nodeId).getProto().equals( builder.getNodeId())) { builder.setNodeId(convertToProtoFormat(this.nodeId)); } @@ -253,12 +253,12 @@ private void maybeInitBuilder() { } - private NodeIdPBImpl convertFromProtoFormat(NodeIdProto p) { - return new NodeIdPBImpl(p); + private NodeId convertFromProtoFormat(NodeIdProto p) { + return new NodeId(p); } private NodeIdProto convertToProtoFormat(NodeId nodeId) { - return ((NodeIdPBImpl) nodeId).getProto(); + return nodeId.getProto(); } private ResourcePBImpl convertFromProtoFormat(ResourceProto p) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java index 0fea4aa..fc2b5c0 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java @@ -37,7 +37,6 @@ import org.apache.hadoop.yarn.api.records.QueueState; 
import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationResourceUsageReportPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto; @@ -81,10 +80,10 @@ public static NodeState convertFromProtoFormat(NodeStateProto e) { * NodeId */ public static NodeIdProto convertToProtoFormat(NodeId e) { - return ((NodeIdPBImpl)e).getProto(); + return e.getProto(); } public static NodeId convertFromProtoFormat(NodeIdProto e) { - return new NodeIdPBImpl(e); + return new NodeId(e); } /* diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java index 605dc3b..cd355bd 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java @@ -50,6 +50,7 @@ import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; /** @@ -185,9 +186,7 @@ public void launchAM(ApplicationAttemptId attemptId) throws 
IOException { envAMList.add("CLASSPATH="+classpath); } - ContainerId containerId = Records.newRecord(ContainerId.class); - containerId.setApplicationAttemptId(attemptId); - containerId.setId(0); + ContainerId containerId = BuilderUtils.newContainerId(attemptId, 0); String hostname = InetAddress.getLocalHost().getHostName(); envAMList.add(ApplicationConstants.AM_CONTAINER_ID_ENV + "=" + containerId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java index d7da036..20092f3 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java @@ -41,8 +41,6 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -98,10 +96,8 @@ public int run(String[] args) throws Exception { return -1; } - RecordFactory recordFactory = - RecordFactoryProvider.getRecordFactory(getConf()); ApplicationId appId = - ConverterUtils.toApplicationId(recordFactory, appIdStr); + ConverterUtils.toApplicationId(appIdStr); if (appOwner == null || appOwner.isEmpty()) { appOwner = UserGroupInformation.getCurrentUser().getShortUserName(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java index b3baff7..c2d6173 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java @@ -24,7 +24,6 @@ import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import static org.apache.hadoop.yarn.util.StringHelper.*; @@ -45,11 +44,10 @@ public static ApplicationId toAppID(String prefix, String s, Iterator it throwParseException(sjoin(prefix, ID), s); } shouldHaveNext(prefix, s, it); - ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); + long ts = Long.parseLong(it.next()); shouldHaveNext(prefix, s, it); - appId.setId(Integer.parseInt(it.next())); - return appId; + int id = Integer.parseInt(it.next()); + return BuilderUtils.newApplicationId(ts, id); } public static void shouldHaveNext(String prefix, String s, Iterator it) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java index 502f1dd..40e31a9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java @@ -58,6 +58,10 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; +import 
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; +import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; /** @@ -126,55 +130,40 @@ public static LocalResource newLocalResource(URI uri, visibility, size, timestamp); } - public static ApplicationId newApplicationId(RecordFactory recordFactory, - long clustertimestamp, CharSequence id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(Integer.valueOf(id.toString())); - applicationId.setClusterTimestamp(clustertimestamp); - return applicationId; - } - - public static ApplicationId newApplicationId(RecordFactory recordFactory, - long clusterTimeStamp, int id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(id); - applicationId.setClusterTimestamp(clusterTimeStamp); - return applicationId; + public static ApplicationId newApplicationId( + CharSequence clustertimestamp, CharSequence id) { + return newApplicationId(Long.valueOf(clustertimestamp.toString()), id); } public static ApplicationId newApplicationId(long clusterTimeStamp, int id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(id); - applicationId.setClusterTimestamp(clusterTimeStamp); - return applicationId; + ApplicationIdProto.Builder appId = ApplicationIdProto.newBuilder(); + appId.setId(id); + appId.setClusterTimestamp(clusterTimeStamp); + return new ApplicationId(appId.build()); } + public static ApplicationId newApplicationId(long clustertimestamp, CharSequence id) { + return newApplicationId(clustertimestamp, Integer.valueOf(id.toString())); + } + + public static ApplicationAttemptId newApplicationAttemptId( + ApplicationId appId, CharSequence id) { + 
return newApplicationAttemptId(appId, Integer.valueOf(id.toString())); + } + public static ApplicationAttemptId newApplicationAttemptId( ApplicationId appId, int attemptId) { - ApplicationAttemptId appAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); + ApplicationAttemptIdProto.Builder appAttemptId = + ApplicationAttemptIdProto.newBuilder(); appAttemptId.setAttemptId(attemptId); - return appAttemptId; - } - - public static ApplicationId convert(long clustertimestamp, CharSequence id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(Integer.valueOf(id.toString())); - applicationId.setClusterTimestamp(clustertimestamp); - return applicationId; + return new ApplicationAttemptId(appId, appAttemptId); } public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, int containerId) { - ContainerId id = recordFactory.newRecordInstance(ContainerId.class); + ContainerIdProto.Builder id = ContainerIdProto.newBuilder(); id.setId(containerId); - id.setApplicationAttemptId(appAttemptId); - return id; + return new ContainerId((ApplicationAttemptId)appAttemptId, id); } public static ContainerId newContainerId(int appId, int appAttemptId, @@ -185,30 +174,17 @@ public static ContainerId newContainerId(int appId, int appAttemptId, ContainerId cId = newContainerId(applicationAttemptId, id); return cId; } - - public static ContainerId newContainerId(RecordFactory recordFactory, - ApplicationId appId, ApplicationAttemptId appAttemptId, - int containerId) { - ContainerId id = recordFactory.newRecordInstance(ContainerId.class); - id.setId(containerId); - id.setApplicationAttemptId(appAttemptId); - return id; - } - - public static ContainerId newContainerId(RecordFactory recordFactory, - ApplicationAttemptId appAttemptId, - int containerId) { - ContainerId id = recordFactory.newRecordInstance(ContainerId.class); - 
id.setApplicationAttemptId(appAttemptId); - id.setId(containerId); - return id; + + public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, + CharSequence id) { + return newContainerId(appAttemptId, Integer.valueOf(id.toString())); } public static NodeId newNodeId(String host, int port) { - NodeId nodeId = recordFactory.newRecordInstance(NodeId.class); + NodeIdProto.Builder nodeId = NodeIdProto.newBuilder(); nodeId.setHost(host); nodeId.setPort(port); - return nodeId; + return new NodeId(nodeId.build()); } public static NodeReport newNodeReport(NodeId nodeId, NodeState nodeState, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java index 21fe2d9..a06ccd4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java @@ -33,7 +33,6 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.URL; -import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; @@ -105,40 +104,15 @@ public static String toString(ApplicationId appId) { return appId.toString(); } - public static ApplicationId toApplicationId(RecordFactory recordFactory, - String appIdStr) { - Iterator it = _split(appIdStr).iterator(); - it.next(); // prefix. 
TODO: Validate application prefix - return toApplicationId(recordFactory, it); - } - - private static ApplicationId toApplicationId(RecordFactory recordFactory, - Iterator it) { - ApplicationId appId = - recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - appId.setId(Integer.parseInt(it.next())); - return appId; - } - private static ApplicationAttemptId toApplicationAttemptId( Iterator it) throws NumberFormatException { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - appId.setId(Integer.parseInt(it.next())); - ApplicationAttemptId appAttemptId = Records - .newRecord(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(Integer.parseInt(it.next())); - return appAttemptId; + ApplicationId appId = BuilderUtils.newApplicationId(it.next(), it.next()); + return BuilderUtils.newApplicationAttemptId(appId, it.next()); } private static ApplicationId toApplicationId( Iterator it) throws NumberFormatException { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - appId.setId(Integer.parseInt(it.next())); - return appId; + return BuilderUtils.newApplicationId(it.next(), it.next()); } public static String toString(ContainerId cId) { @@ -168,10 +142,7 @@ public static ContainerId toContainerId(String containerIdStr) { } try { ApplicationAttemptId appAttemptID = toApplicationAttemptId(it); - ContainerId containerId = Records.newRecord(ContainerId.class); - containerId.setApplicationAttemptId(appAttemptID); - containerId.setId(Integer.parseInt(it.next())); - return containerId; + return BuilderUtils.newContainerId(appAttemptID, it.next()); } catch (NumberFormatException n) { throw new IllegalArgumentException("Invalid ContainerId: " + containerIdStr, n); diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java index cc67ff7..d09ea09 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java @@ -23,7 +23,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import com.google.common.collect.Iterators; @@ -62,17 +62,11 @@ public static String newQueue() { } public static ApplicationId newAppID(int i) { - ApplicationId id = Records.newRecord(ApplicationId.class); - id.setClusterTimestamp(TS); - id.setId(i); - return id; + return BuilderUtils.newApplicationId(TS, i); } public static ApplicationAttemptId newAppAttemptID(ApplicationId appId, int i) { - ApplicationAttemptId id = Records.newRecord(ApplicationAttemptId.class); - id.setApplicationId(appId); - id.setAttemptId(i); - return id; + return BuilderUtils.newApplicationAttemptId(appId, i); } public static YarnApplicationState newAppState() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java index b18588d..8f62955 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java @@ -50,6 +50,7 @@ import 
org.apache.hadoop.yarn.factory.providers.YarnRemoteExceptionFactoryProvider; import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; import org.apache.hadoop.yarn.ipc.YarnRPC; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; /* @@ -89,18 +90,11 @@ private void testRPCTimeout(String rpcClass) throws Exception { ContainerLaunchContext containerLaunchContext = recordFactory .newRecordInstance(ContainerLaunchContext.class); containerLaunchContext.setUser("dummy-user"); - ContainerId containerId = recordFactory - .newRecordInstance(ContainerId.class); - ApplicationId applicationId = recordFactory - .newRecordInstance(ApplicationId.class); - ApplicationAttemptId applicationAttemptId = recordFactory - .newRecordInstance(ApplicationAttemptId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); - applicationAttemptId.setApplicationId(applicationId); - applicationAttemptId.setAttemptId(0); - containerId.setApplicationAttemptId(applicationAttemptId); - containerId.setId(100); + ApplicationId applicationId = BuilderUtils.newApplicationId(0, 0); + ApplicationAttemptId applicationAttemptId = + BuilderUtils.newApplicationAttemptId(applicationId, 0); + ContainerId containerId = + BuilderUtils.newContainerId(applicationAttemptId, 100); containerLaunchContext.setContainerId(containerId); containerLaunchContext.setResource(recordFactory .newRecordInstance(Resource.class)); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java index 6975db2..c1c6754 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java @@ -52,6 +52,7 @@ import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; import org.apache.hadoop.yarn.ipc.RPCUtil; import 
org.apache.hadoop.yarn.ipc.YarnRPC; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; import org.junit.Test; @@ -112,18 +113,11 @@ private void test(String rpcClass) throws Exception { ContainerLaunchContext containerLaunchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); containerLaunchContext.setUser("dummy-user"); - ContainerId containerId = - recordFactory.newRecordInstance(ContainerId.class); - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); + ApplicationId applicationId = BuilderUtils.newApplicationId(0, 0); ApplicationAttemptId applicationAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); - applicationAttemptId.setApplicationId(applicationId); - applicationAttemptId.setAttemptId(0); - containerId.setApplicationAttemptId(applicationAttemptId); - containerId.setId(100); + BuilderUtils.newApplicationAttemptId(applicationId, 0); + ContainerId containerId = + BuilderUtils.newContainerId(applicationAttemptId, 100); containerLaunchContext.setContainerId(containerId); containerLaunchContext.setResource( recordFactory.newRecordInstance(Resource.class)); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java index 764ab42..a4b7aca 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java @@ -23,7 +23,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.util.Records; +import 
org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; public class TestApplicationAttemptId { @@ -59,14 +59,8 @@ public void testApplicationAttemptId() { private ApplicationAttemptId createAppAttemptId(long clusterTimeStamp, int id, int attemptId) { - ApplicationAttemptId appAttemptId = - Records.newRecord(ApplicationAttemptId.class); - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(clusterTimeStamp); - appId.setId(id); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(attemptId); - return appAttemptId; + ApplicationId appId = BuilderUtils.newApplicationId(clusterTimeStamp, id); + return BuilderUtils.newApplicationAttemptId(appId, attemptId); } public static void main(String[] args) throws Exception { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java index a7d701a..e5c85a2 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java @@ -21,17 +21,17 @@ import junit.framework.Assert; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; public class TestApplicationId { @Test public void testApplicationId() { - ApplicationId a1 = createAppId(10l, 1); - ApplicationId a2 = createAppId(10l, 2); - ApplicationId a3 = createAppId(10l, 1); - ApplicationId a4 = createAppId(8l, 3); + ApplicationId a1 = BuilderUtils.newApplicationId(10l, 1); + ApplicationId a2 = BuilderUtils.newApplicationId(10l, 2); + ApplicationId a3 = BuilderUtils.newApplicationId(10l, 1); + ApplicationId a4 = BuilderUtils.newApplicationId(8l, 3); 
Assert.assertFalse(a1.equals(a2)); Assert.assertFalse(a1.equals(a4)); @@ -46,15 +46,8 @@ public void testApplicationId() { Assert.assertFalse(a2.hashCode() == a4.hashCode()); long ts = System.currentTimeMillis(); - ApplicationId a5 = createAppId(ts, 45436343); + ApplicationId a5 = BuilderUtils.newApplicationId(ts, 45436343); Assert.assertEquals("application_10_0001", a1.toString()); Assert.assertEquals("application_" + ts + "_45436343", a5.toString()); } - - private ApplicationId createAppId(long clusterTimeStamp, int id) { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(clusterTimeStamp); - appId.setId(id); - return appId; - } } \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java index ac3a2a0..059fcae 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java @@ -24,7 +24,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; public class TestContainerId { @@ -64,25 +64,15 @@ private ContainerId createContainerId(long clusterTimestamp, int appIdInt, ApplicationId appId = createAppId(clusterTimestamp, appIdInt); ApplicationAttemptId appAttemptId = createAppAttemptId(appId, appAttemptIdInt); - ContainerId containerId = Records.newRecord(ContainerId.class); - containerId.setApplicationAttemptId(appAttemptId); - containerId.setId(containerIdInt); - return containerId; + return 
BuilderUtils.newContainerId(appAttemptId, containerIdInt); } private ApplicationId createAppId(long clusterTimeStamp, int id) { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(clusterTimeStamp); - appId.setId(id); - return appId; + return BuilderUtils.newApplicationId(clusterTimeStamp, id); } private ApplicationAttemptId createAppAttemptId(ApplicationId appId, int attemptId) { - ApplicationAttemptId appAttemptId = - Records.newRecord(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(attemptId); - return appAttemptId; + return BuilderUtils.newApplicationAttemptId(appId, attemptId); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java index 45358cb..4c3884e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestNodeId.java @@ -21,7 +21,7 @@ import junit.framework.Assert; import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; public class TestNodeId { @@ -48,9 +48,6 @@ public void testNodeId() { } private NodeId createNodeId(String host, int port) { - NodeId nodeId = Records.newRecord(NodeId.class); - nodeId.setHost(host); - nodeId.setPort(port); - return nodeId; + return BuilderUtils.newNodeId(host, port); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java index 080a79c..cc6148b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatResponsePBImpl.java @@ -25,8 +25,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto; import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto; @@ -283,20 +281,20 @@ public void setNextHeartBeatInterval(long nextHeartBeatInterval) { builder.setNextHeartBeatInterval(nextHeartBeatInterval); } - private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) { - return new ContainerIdPBImpl(p); + private ContainerId convertFromProtoFormat(ContainerIdProto p) { + return new ContainerId(p); } private ContainerIdProto convertToProtoFormat(ContainerId t) { - return ((ContainerIdPBImpl) t).getProto(); + return t.getProto(); } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) { - return new ApplicationIdPBImpl(p); + private ApplicationId convertFromProtoFormat(ApplicationIdProto p) { + return new ApplicationId(p); } private ApplicationIdProto convertToProtoFormat(ApplicationId t) { - return ((ApplicationIdPBImpl) t).getProto(); + return t.getProto(); } private NodeAction 
convertFromProtoFormat(NodeActionProto p) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RegisterNodeManagerRequestPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RegisterNodeManagerRequestPBImpl.java index 385971b..1b36143 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RegisterNodeManagerRequestPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/RegisterNodeManagerRequestPBImpl.java @@ -22,7 +22,6 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.ProtoBase; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto; @@ -139,12 +138,12 @@ public void setHttpPort(int httpPort) { builder.setHttpPort(httpPort); } - private NodeIdPBImpl convertFromProtoFormat(NodeIdProto p) { - return new NodeIdPBImpl(p); + private NodeId convertFromProtoFormat(NodeIdProto p) { + return new NodeId(p); } private NodeIdProto convertToProtoFormat(NodeId t) { - return ((NodeIdPBImpl)t).getProto(); + return t.getProto(); } private ResourcePBImpl convertFromProtoFormat(ResourceProto p) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java index 8b5ff01..3058133 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java @@ -28,10 +28,8 @@ import org.apache.hadoop.yarn.api.records.NodeHealthStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.ProtoBase; -import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerStatusPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.NodeHealthStatusPBImpl; -import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl; import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto; import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto; import org.apache.hadoop.yarn.proto.YarnProtos.NodeHealthStatusProto; @@ -280,11 +278,11 @@ public synchronized void setNodeHealthStatus(NodeHealthStatus healthStatus) { } private NodeIdProto convertToProtoFormat(NodeId nodeId) { - return ((NodeIdPBImpl)nodeId).getProto(); + return nodeId.getProto(); } private NodeId convertFromProtoFormat(NodeIdProto proto) { - return new NodeIdPBImpl(proto); + return new NodeId(proto); } private NodeHealthStatusProto convertToProtoFormat( @@ -304,11 +302,11 @@ private ContainerStatusProto convertToProtoFormat(ContainerStatus c) { return ((ContainerStatusPBImpl)c).getProto(); } - private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto c) { - return new ApplicationIdPBImpl(c); + private ApplicationId convertFromProtoFormat(ApplicationIdProto c) { + return new ApplicationId(c); } private ApplicationIdProto 
convertToProtoFormat(ApplicationId c) { - return ((ApplicationIdPBImpl)c).getProto(); + return c.getProto(); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/Context.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/Context.java index 4fd206f..08e1e29 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/Context.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/Context.java @@ -41,6 +41,13 @@ */ NodeId getNodeId(); + /** + * Set the node id. This is intended to only be set once when the server + * connection is established. + * @param nodeId + */ + void setNodeId(NodeId nodeId); + ConcurrentMap getApplications(); ConcurrentMap getContainers(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java index 7a53eb9..64aecd9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java @@ -56,7 +56,6 @@ import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.service.CompositeService; import org.apache.hadoop.yarn.service.Service; -import org.apache.hadoop.yarn.util.Records; import 
com.google.common.annotations.VisibleForTesting; @@ -252,7 +251,7 @@ protected void cleanupContainers() { public static class NMContext implements Context { - private final NodeId nodeId = Records.newRecord(NodeId.class); + private NodeId nodeId = new NodeId(); private final ConcurrentMap applications = new ConcurrentHashMap(); private final ConcurrentMap containers = @@ -277,6 +276,11 @@ public NMContext(NMContainerTokenSecretManager containerTokenSecretManager) { public NodeId getNodeId() { return this.nodeId; } + + @Override + public void setNodeId(NodeId nodeId) { + this.nodeId = nodeId; + } @Override public ConcurrentMap getApplications() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java index b4a0034..114b753 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; import java.util.Map; import java.util.Set; @@ -101,6 +99,7 @@ import org.apache.hadoop.yarn.service.CompositeService; import org.apache.hadoop.yarn.service.Service; import org.apache.hadoop.yarn.service.ServiceStateChangeListener; +import org.apache.hadoop.yarn.util.BuilderUtils; public class ContainerManagerImpl extends CompositeService implements ServiceStateChangeListener, ContainerManager, @@ -242,8 
+241,8 @@ public void start() { server.start(); InetSocketAddress connectAddress = NetUtils.getConnectAddress(server); - this.context.getNodeId().setHost(connectAddress.getHostName()); - this.context.getNodeId().setPort(connectAddress.getPort()); + this.context.setNodeId(BuilderUtils.newNodeId(connectAddress.getHostName(), + connectAddress.getPort())); LOG.info("ContainerManager started at " + connectAddress); super.start(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java index f183d9e..7e48ea2 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java @@ -40,7 +40,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy; @@ -91,7 +90,6 @@ private LocalDirsHandlerService dirsHandler; Path remoteRootLogDir; String remoteRootLogDirSuffix; - private NodeId nodeId; private final ConcurrentMap appLogAggregators; @@ -125,9 +123,6 @@ public synchronized void init(Configuration conf) { @Override public synchronized void start() { - 
// NodeId is only available during start, the following cannot be moved - // anywhere else. - this.nodeId = this.context.getNodeId(); super.start(); } @@ -210,7 +205,7 @@ void verifyAndCreateRemoteLogDir(Configuration conf) { Path getRemoteNodeLogFileForApp(ApplicationId appId, String user) { return LogAggregationUtils.getRemoteNodeLogFileForApp( - this.remoteRootLogDir, appId, user, this.nodeId, + this.remoteRootLogDir, appId, user, this.context.getNodeId(), this.remoteRootLogDirSuffix); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java index 1a92491..e52a798 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java @@ -23,10 +23,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID; import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo; @@ -63,21 +60,16 @@ private String containersTableInit() { YarnWebParams { private final Context nmContext; - private final Configuration conf; - private final RecordFactory 
recordFactory; @Inject - public ApplicationBlock(Context nmContext, Configuration conf) { - this.conf = conf; + public ApplicationBlock(Context nmContext) { this.nmContext = nmContext; - this.recordFactory = RecordFactoryProvider.getRecordFactory(this.conf); } @Override protected void render(Block html) { ApplicationId applicationID = - ConverterUtils.toApplicationId(this.recordFactory, - $(APPLICATION_ID)); + ConverterUtils.toApplicationId($(APPLICATION_ID)); Application app = this.nmContext.getApplications().get(applicationID); AppInfo info = new AppInfo(app); info("Application's information") diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java index 168f18a..101146f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java @@ -30,8 +30,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.ResourceView; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; @@ -56,8 +54,6 @@ private Context nmContext; private ResourceView rview; private WebApp webapp; - private static RecordFactory recordFactory = RecordFactoryProvider - .getRecordFactory(null); private 
@javax.ws.rs.core.Context HttpServletResponse response; @@ -128,7 +124,7 @@ public AppsInfo getNodeApps(@QueryParam("state") String stateQuery, @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) public AppInfo getNodeApp(@PathParam("appid") String appId) { init(); - ApplicationId id = ConverterUtils.toApplicationId(recordFactory, appId); + ApplicationId id = ConverterUtils.toApplicationId(appId); if (id == null) { throw new NotFoundException("app with id " + appId + " not found"); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java index b1283b5..674efba 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java @@ -42,6 +42,7 @@ import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; public class TestEventFlow { @@ -107,16 +108,10 @@ protected void startStatusUpdater() { ContainerLaunchContext launchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); - ContainerId cID = recordFactory.newRecordInstance(ContainerId.class); - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); - ApplicationAttemptId applicationAttemptId = - 
recordFactory.newRecordInstance(ApplicationAttemptId.class); - applicationAttemptId.setApplicationId(applicationId); - applicationAttemptId.setAttemptId(0); - cID.setApplicationAttemptId(applicationAttemptId); + ApplicationId applicationId = BuilderUtils.newApplicationId(0, 0); + ApplicationAttemptId applicationAttemptId = + BuilderUtils.newApplicationAttemptId(applicationId, 0); + ContainerId cID = BuilderUtils.newContainerId(applicationAttemptId, 0); launchContext.setContainerId(cID); launchContext.setUser("testing"); launchContext.setResource(recordFactory.newRecordInstance(Resource.class)); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index 10a85c7..aaecf5f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -50,6 +50,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.Records; import org.junit.After; @@ -216,17 +217,10 @@ private void createFiles(String dir, String subDir, int numOfFiles) { } private ContainerId createContainerId() { - ApplicationId appId = Records.newRecord(ApplicationId.class); - 
appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = BuilderUtils.newApplicationId(0, 0); ApplicationAttemptId appAttemptId = - Records.newRecord(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(1); - ContainerId containerId = - Records.newRecord(ContainerId.class); - containerId.setApplicationAttemptId(appAttemptId); - return containerId; + BuilderUtils.newApplicationAttemptId(appId, 1); + return BuilderUtils.newContainerId(appAttemptId, 0); } private class MyNodeManager extends NodeManager { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index f422617..1c09046 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -52,6 +52,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.junit.After; import org.junit.Before; @@ -171,17 +172,10 @@ public void testKillContainersOnShutdown() throws IOException { } private ContainerId createContainerId() { - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = BuilderUtils.newApplicationId(0, 0); 
ApplicationAttemptId appAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(1); - ContainerId containerId = - recordFactory.newRecordInstance(ContainerId.class); - containerId.setApplicationAttemptId(appAttemptId); - return containerId; + BuilderUtils.newApplicationAttemptId(appId, 1); + return BuilderUtils.newContainerId(appAttemptId, 0); } private YarnConfiguration createNMConfig() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java index 59b65e2..aa7c72c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java @@ -129,14 +129,10 @@ public RegisterNodeManagerResponse registerNodeManager( return response; } - ApplicationId applicationID = recordFactory - .newRecordInstance(ApplicationId.class); - ApplicationAttemptId appAttemptID = recordFactory - .newRecordInstance(ApplicationAttemptId.class); - ContainerId firstContainerID = recordFactory - .newRecordInstance(ContainerId.class); - ContainerId secondContainerID = recordFactory - .newRecordInstance(ContainerId.class); + ApplicationId applicationID; + ApplicationAttemptId appAttemptID; + ContainerId firstContainerID; + ContainerId secondContainerID; private Map> getAppToContainerStatusMap( List containers) { @@ -167,10 +163,9 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) Assert.assertEquals(0, 
nodeStatus.getContainersStatuses().size()); // Give a container to the NM. - applicationID.setId(heartBeatID); - appAttemptID.setApplicationId(applicationID); - firstContainerID.setApplicationAttemptId(appAttemptID); - firstContainerID.setId(heartBeatID); + applicationID = BuilderUtils.newApplicationId(0, heartBeatID); + appAttemptID = BuilderUtils.newApplicationAttemptId(applicationID, 0); + firstContainerID = BuilderUtils.newContainerId(appAttemptID, heartBeatID); ContainerLaunchContext launchContext = recordFactory .newRecordInstance(ContainerLaunchContext.class); launchContext.setContainerId(firstContainerID); @@ -191,10 +186,9 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) Assert.assertEquals(1, activeContainers.size()); // Give another container to the NM. - applicationID.setId(heartBeatID); - appAttemptID.setApplicationId(applicationID); - secondContainerID.setApplicationAttemptId(appAttemptID); - secondContainerID.setId(heartBeatID); + applicationID = BuilderUtils.newApplicationId(0, heartBeatID); + appAttemptID = BuilderUtils.newApplicationAttemptId(applicationID, 0); + secondContainerID = BuilderUtils.newContainerId(appAttemptID, heartBeatID); ContainerLaunchContext launchContext = recordFactory .newRecordInstance(ContainerLaunchContext.class); launchContext.setContainerId(secondContainerID); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java index 46c9faa..6150c0e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java @@ -30,14 +30,13 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices; import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.Service; +import org.apache.hadoop.yarn.util.BuilderUtils; import static org.apache.hadoop.yarn.service.Service.STATE.*; @@ -123,8 +122,7 @@ public void testAuxEventDispatch() { aux.init(conf); aux.start(); - ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - appId.setId(65); + ApplicationId appId = BuilderUtils.newApplicationId(0, 65); ByteBuffer buf = ByteBuffer.allocate(6); buf.putChar('A'); buf.putInt(65); @@ -132,7 +130,7 @@ public void testAuxEventDispatch() { AuxServicesEvent event = new AuxServicesEvent( AuxServicesEventType.APPLICATION_INIT, "user0", appId, "Asrv", buf); aux.handle(event); - appId.setId(66); + appId = BuilderUtils.newApplicationId(0, 66); event = new AuxServicesEvent( AuxServicesEventType.APPLICATION_STOP, "user0", appId, "Bsrv", null); // verify all services got the stop event diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 5b01cc0..6dacb86 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -58,6 +58,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.junit.Test; @@ -72,17 +73,10 @@ public TestContainerManager() throws UnsupportedFileSystemException { } private ContainerId createContainerId() { - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = BuilderUtils.newApplicationId(0, 0); ApplicationAttemptId appAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(1); - ContainerId containerId = - recordFactory.newRecordInstance(ContainerId.class); - containerId.setApplicationAttemptId(appAttemptId); - return containerId; + BuilderUtils.newApplicationAttemptId(appId, 1); + return BuilderUtils.newContainerId(appAttemptId, 0); } @Test diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java index 822835d..265be3e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java @@ -57,6 +57,7 @@ import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin; @@ -185,16 +186,10 @@ public void testContainerEnvVariables() throws Exception { recordFactory.newRecordInstance(ContainerLaunchContext.class); // ////// Construct the Container-id - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = BuilderUtils.newApplicationId(0, 0); ApplicationAttemptId appAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(1); - ContainerId cId = - recordFactory.newRecordInstance(ContainerId.class); - cId.setApplicationAttemptId(appAttemptId); + BuilderUtils.newApplicationAttemptId(appId, 1); + ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); containerLaunchContext.setContainerId(cId); containerLaunchContext.setUser(user); @@ -303,16 +298,10 @@ public 
void testDelayedKill() throws Exception { recordFactory.newRecordInstance(ContainerLaunchContext.class); // ////// Construct the Container-id - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(1); - appId.setId(1); + ApplicationId appId = BuilderUtils.newApplicationId(1, 1); ApplicationAttemptId appAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(1); - ContainerId cId = - recordFactory.newRecordInstance(ContainerId.class); - cId.setApplicationAttemptId(appAttemptId); + BuilderUtils.newApplicationAttemptId(appId, 1); + ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); containerLaunchContext.setContainerId(cId); containerLaunchContext.setUser(user); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalizedResource.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalizedResource.java index 07d8df1..56de145 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalizedResource.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalizedResource.java @@ -44,22 +44,17 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent; +import 
org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; import org.mockito.ArgumentMatcher; public class TestLocalizedResource { static ContainerId getMockContainer(int id) { - ApplicationId appId = mock(ApplicationId.class); - when(appId.getClusterTimestamp()).thenReturn(314159265L); - when(appId.getId()).thenReturn(3); - ApplicationAttemptId appAttemptId = mock(ApplicationAttemptId.class); - when(appAttemptId.getApplicationId()).thenReturn(appId); - when(appAttemptId.getAttemptId()).thenReturn(0); - ContainerId container = mock(ContainerId.class); - when(container.getId()).thenReturn(id); - when(container.getApplicationAttemptId()).thenReturn(appAttemptId); - return container; + ApplicationId appId = BuilderUtils.newApplicationId(314159265L, 3); + ApplicationAttemptId appAttemptId = + BuilderUtils.newApplicationAttemptId(appId, 0); + return BuilderUtils.newContainerId(appAttemptId, id); } @Test diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index 6a9a676..9b08f4a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -47,8 +47,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; -import org.apache.hadoop.io.DataInputBuffer; -import 
org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.YarnException; @@ -681,10 +679,7 @@ public void testLogAggregationForRealContainerLaunch() throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); // ////// Construct the Container-id - ApplicationId appId = - recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = BuilderUtils.newApplicationId(0, 0); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(appId, 1); ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index 99d7d4d..8dc792d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -60,6 +60,7 @@ import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree; 
@@ -199,17 +200,10 @@ public void testContainerKillOnMemoryOverflow() throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); // ////// Construct the Container-id - ApplicationId appId = - recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = BuilderUtils.newApplicationId(0, 0); ApplicationAttemptId appAttemptId = - recordFactory.newRecordInstance(ApplicationAttemptId.class); - appAttemptId.setApplicationId(appId); - appAttemptId.setAttemptId(1); - ContainerId cId = recordFactory.newRecordInstance(ContainerId.class); - cId.setId(0); - cId.setApplicationAttemptId(appAttemptId); + BuilderUtils.newApplicationAttemptId(appId, 1); + ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); containerLaunchContext.setContainerId(cId); containerLaunchContext.setUser(user); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java index 93fca8f..659110b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java @@ -21,11 +21,8 @@ import java.util.HashMap; import java.util.Map; -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState; @@ -48,10 +45,7 @@ public MockApp(String user, long clusterTimeStamp, int uniqId) { super(); this.user = user; // Add an application and the corresponding containers - RecordFactory recordFactory = RecordFactoryProvider - .getRecordFactory(new Configuration()); - this.appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp, - uniqId); + this.appId = BuilderUtils.newApplicationId(clusterTimeStamp, uniqId); appState = ApplicationState.NEW; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java index 519ff18..59ab703 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java @@ -54,8 +54,7 @@ public MockContainer(ApplicationAttemptId appAttemptId, this.user = user; this.recordFactory = RecordFactoryProvider.getRecordFactory(conf); - this.id = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, - uniqId); + this.id = BuilderUtils.newContainerId(appAttemptId, uniqId); this.launchContext = recordFactory .newRecordInstance(ContainerLaunchContext.class); launchContext.setContainerId(id); diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java index 4594939..dbe33cf 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestContainerLogsPage.java @@ -30,8 +30,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; @@ -52,18 +50,15 @@ public void testContainerLogDirs() throws IOException { healthChecker.init(conf); LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler(); // Add an application and the corresponding containers - RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(conf); String user = "nobody"; long clusterTimeStamp = 1234; - ApplicationId appId = BuilderUtils.newApplicationId(recordFactory, - clusterTimeStamp, 1); + ApplicationId appId = BuilderUtils.newApplicationId(clusterTimeStamp, 1); Application app = mock(Application.class); when(app.getUser()).thenReturn(user); when(app.getAppId()).thenReturn(appId); ApplicationAttemptId appAttemptId = 
BuilderUtils.newApplicationAttemptId( appId, 1); - ContainerId container1 = BuilderUtils.newContainerId(recordFactory, appId, - appAttemptId, 0); + ContainerId container1 = BuilderUtils.newContainerId(appAttemptId, 0); List files = null; files = ContainerLogsPage.ContainersLogsBlock.getContainerLogDirs( container1, dirsHandler); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java index d29e73e..9708989 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServer.java @@ -162,17 +162,15 @@ public boolean isPmemCheckEnabled() { String user = "nobody"; long clusterTimeStamp = 1234; ApplicationId appId = - BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp, 1); + BuilderUtils.newApplicationId(clusterTimeStamp, 1); Application app = mock(Application.class); when(app.getUser()).thenReturn(user); when(app.getAppId()).thenReturn(appId); nmContext.getApplications().put(appId, app); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId( appId, 1); - ContainerId container1 = - BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 0); - ContainerId container2 = - BuilderUtils.newContainerId(recordFactory, appId, appAttemptId, 1); + ContainerId container1 = BuilderUtils.newContainerId(appAttemptId, 0); + ContainerId container2 = BuilderUtils.newContainerId(appAttemptId, 1); NodeManagerMetrics metrics = mock(NodeManagerMetrics.class); for (ContainerId containerId : new 
ContainerId[] { container1, container2}) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java index 89441d7..4c56d82 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java @@ -40,6 +40,7 @@ import org.apache.hadoop.yarn.server.nodemanager.ResourceView; import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.YarnVersionInfo; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.WebApp; @@ -86,8 +87,7 @@ @Override protected void configureServlets() { nmContext = new NodeManager.NMContext(null); - nmContext.getNodeId().setHost("testhost.foo.com"); - nmContext.getNodeId().setPort(8042); + nmContext.setNodeId(BuilderUtils.newNodeId("testhost.foo.com", 8042)); resourceView = new ResourceView() { @Override public long getVmemAllocatedForContainers() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java index 39764e6..c5f66ce 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java @@ -92,8 +92,7 @@ @Override protected void configureServlets() { nmContext = new NodeManager.NMContext(null); - nmContext.getNodeId().setHost("testhost.foo.com"); - nmContext.getNodeId().setPort(9999); + nmContext.setNodeId(BuilderUtils.newNodeId("testhost.foo.com", 9999)); resourceView = new ResourceView() { @Override public long getVmemAllocatedForContainers() { @@ -535,7 +534,7 @@ public void testNodeSingleAppsInvalid() throws JSONException, Exception { addAppContainers(app2); try { - r.path("ws").path("v1").path("node").path("apps").path("app_foo_0000") + r.path("ws").path("v1").path("node").path("apps").path("application_foo_0000") .accept(MediaType.APPLICATION_JSON).get(JSONObject.class); fail("should have thrown exception on invalid user query"); } catch (UniformInterfaceException ue) { @@ -549,11 +548,11 @@ public void testNodeSingleAppsInvalid() throws JSONException, Exception { String type = exception.getString("exception"); String classname = exception.getString("javaClassName"); WebServicesTestUtils.checkStringMatch("exception message", - "For input string: \"foo\"", message); + "Invalid AppAttemptId: application_foo_0000", message); WebServicesTestUtils.checkStringMatch("exception type", - "NumberFormatException", type); + "IllegalArgumentException", type); WebServicesTestUtils.checkStringMatch("exception classname", - "java.lang.NumberFormatException", classname); + "java.lang.IllegalArgumentException", classname); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java index 1f678d4..6072696 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java @@ -92,8 +92,7 @@ @Override protected void configureServlets() { nmContext = new NodeManager.NMContext(null); - nmContext.getNodeId().setHost("testhost.foo.com"); - nmContext.getNodeId().setPort(8042); + nmContext.setNodeId(BuilderUtils.newNodeId("testhost.foo.com", 8042)); resourceView = new ResourceView() { @Override public long getVmemAllocatedForContainers() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java index 1aaca4e..76e16dc 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java @@ -199,7 +199,7 @@ private boolean checkAccess(UserGroupInformation callerUGI, String owner, ApplicationId getNewApplicationId() { ApplicationId applicationId = org.apache.hadoop.yarn.util.BuilderUtils - .newApplicationId(recordFactory, ResourceManager.clusterTimeStamp, + .newApplicationId(ResourceManager.clusterTimeStamp, 
applicationCounter.incrementAndGet()); LOG.info("Allocated new applicationId: " + applicationId.getId()); return applicationId; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java index 62a3ba7..4e1f74d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java @@ -68,7 +68,6 @@ import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.util.BuilderUtils; -import org.apache.hadoop.yarn.util.Records; public class RMAppImpl implements RMApp, Recoverable { @@ -551,10 +550,8 @@ public void recover(RMState state) { @SuppressWarnings("unchecked") private void createNewAttempt(boolean startAttempt) { - ApplicationAttemptId appAttemptId = Records - .newRecord(ApplicationAttemptId.class); - appAttemptId.setApplicationId(applicationId); - appAttemptId.setAttemptId(attempts.size() + 1); + ApplicationAttemptId appAttemptId = + BuilderUtils.newApplicationAttemptId(applicationId, attempts.size() + 1); RMAppAttempt attempt = new RMAppAttemptImpl(appAttemptId, rmContext, scheduler, masterService, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java index 334d3a8..46512d4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java @@ -40,8 +40,6 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeState; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; @@ -77,8 +75,6 @@ private static final String EMPTY = ""; private static final Log LOG = LogFactory.getLog(RMWebServices.class); private final ResourceManager rm; - private static RecordFactory recordFactory = RecordFactoryProvider - .getRecordFactory(null); private final ApplicationACLsManager aclsManager; private @Context HttpServletResponse response; @@ -377,7 +373,7 @@ public AppInfo getApp(@Context HttpServletRequest hsr, throw new NotFoundException("appId, " + appId + ", is empty or null"); } ApplicationId id; - id = ConverterUtils.toApplicationId(recordFactory, appId); + id = ConverterUtils.toApplicationId(appId); if (id == null) { throw new NotFoundException("appId is null"); } @@ -398,7 +394,7 @@ public AppAttemptsInfo getAppAttempts(@PathParam("appid") String appId) { throw new NotFoundException("appId, " + appId + ", is empty or null"); } ApplicationId id; - id = 
ConverterUtils.toApplicationId(recordFactory, appId); + id = ConverterUtils.toApplicationId(appId); if (id == null) { throw new NotFoundException("appId is null"); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java index 20ffeab..b55fa7a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java @@ -51,7 +51,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.Task.State; import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; @Private public class Application { @@ -98,9 +98,8 @@ public Application(String user, String queue, ResourceManager resourceManager) { this.resourceManager = resourceManager; this.applicationId = this.resourceManager.getClientRMService().getNewApplicationId(); - this.applicationAttemptId = Records.newRecord(ApplicationAttemptId.class); - this.applicationAttemptId.setApplicationId(this.applicationId); - this.applicationAttemptId.setAttemptId(this.numAttempts.getAndIncrement()); + this.applicationAttemptId = BuilderUtils.newApplicationAttemptId( + applicationId, this.numAttempts.getAndIncrement()); } public String getUser() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java index c85f233..8469c8a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java @@ -54,9 +54,7 @@ public MockNM(String nodeIdStr, int memory, ResourceTrackerService resourceTrack this.memory = memory; this.resourceTracker = resourceTracker; String[] splits = nodeIdStr.split(":"); - nodeId = Records.newRecord(NodeId.class); - nodeId.setHost(splits[0]); - nodeId.setPort(Integer.parseInt(splits[1])); + nodeId = BuilderUtils.newNodeId(splits[0], Integer.parseInt(splits[1])); } public NodeId getNodeId() { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNodes.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNodes.java index c3fe72d..b0593bd 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNodes.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNodes.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.hadoop.net.Node; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -34,6 +33,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; import 
org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo; +import org.apache.hadoop.yarn.util.BuilderUtils; import com.google.common.collect.Lists; @@ -72,10 +72,7 @@ } public static NodeId newNodeID(String host, int port) { - NodeId nid = recordFactory.newRecordInstance(NodeId.class); - nid.setHost(host); - nid.setPort(port); - return nid; + return BuilderUtils.newNodeId(host, port); } public static Resource newResource(int mem) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java index 170938d..54d42fe 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java @@ -87,9 +87,7 @@ public NodeManager(String hostName, int containerManagerPort, int httpPort, this.capability = capability; Resources.addTo(available, capability); - this.nodeId = recordFactory.newRecordInstance(NodeId.class); - this.nodeId.setHost(hostName); - this.nodeId.setPort(containerManagerPort); + this.nodeId = BuilderUtils.newNodeId(hostName, containerManagerPort); RegisterNodeManagerRequest request = recordFactory .newRecordInstance(RegisterNodeManagerRequest.class); request.setHttpPort(httpPort); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java index 0d5ef6d..9f0f969 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java @@ -153,8 +153,7 @@ public void testGetApplicationReport() throws YarnRemoteException { RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); GetApplicationReportRequest request = recordFactory .newRecordInstance(GetApplicationReportRequest.class); - request.setApplicationId(recordFactory - .newRecordInstance(ApplicationId.class)); + request.setApplicationId(BuilderUtils.newApplicationId(0, 0)); GetApplicationReportResponse applicationReport = rmService .getApplicationReport(request); Assert.assertNull("It should return null as application report for absent application.", @@ -365,11 +364,7 @@ private void mockRMContext(YarnScheduler yarnScheduler, RMContext rmContext) } private ApplicationId getApplicationId(int id) { - ApplicationId applicationId = recordFactory - .newRecordInstance(ApplicationId.class); - applicationId.setClusterTimestamp(123456); - applicationId.setId(id); - return applicationId; + return BuilderUtils.newApplicationId(123456, id); } private RMAppImpl getRMApp(RMContext rmContext, YarnScheduler yarnScheduler, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java index 
61ed065..3de2f31 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java @@ -42,6 +42,7 @@ import org.apache.hadoop.yarn.server.api.records.NodeAction; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; import org.junit.After; import org.junit.Test; @@ -257,9 +258,7 @@ public void testNodeRegistrationFailure() throws Exception { ResourceTrackerService resourceTrackerService = rm.getResourceTrackerService(); RegisterNodeManagerRequest req = Records.newRecord( RegisterNodeManagerRequest.class); - NodeId nodeId = Records.newRecord(NodeId.class); - nodeId.setHost("host2"); - nodeId.setPort(1234); + NodeId nodeId = BuilderUtils.newNodeId("host2", 1234); req.setNodeId(nodeId); req.setHttpPort(1234); // trying to register a invalid node. 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java index afab180..4650cc5 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java @@ -39,6 +39,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; import com.google.common.collect.Lists; @@ -241,8 +242,7 @@ public int pullRMNodeUpdates(Collection updatedNodes) { public static RMApp newApplication(int i) { final ApplicationAttemptId appAttemptId = newAppAttemptID(newAppID(i), 0); final Container masterContainer = Records.newRecord(Container.class); - ContainerId containerId = Records.newRecord(ContainerId.class); - containerId.setApplicationAttemptId(appAttemptId); + ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 0); masterContainer.setId(containerId); masterContainer.setNodeHttpAddress("node:port"); final String user = newUserName(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java index bf985d3..8d47ab5 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestNMExpiry.java @@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType; import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Before; import org.junit.Test; @@ -131,9 +131,7 @@ public void testNMExpiry() throws Exception { RegisterNodeManagerRequest request1 = recordFactory .newRecordInstance(RegisterNodeManagerRequest.class); - NodeId nodeId1 = Records.newRecord(NodeId.class); - nodeId1.setPort(0); - nodeId1.setHost(hostname1); + NodeId nodeId1 = BuilderUtils.newNodeId(hostname1, 0); request1.setNodeId(nodeId1); request1.setHttpPort(0); request1.setResource(capability); @@ -141,9 +139,7 @@ public void testNMExpiry() throws Exception { RegisterNodeManagerRequest request2 = recordFactory .newRecordInstance(RegisterNodeManagerRequest.class); - NodeId nodeId2 = Records.newRecord(NodeId.class); - nodeId2.setPort(0); - nodeId2.setHost(hostname2); + NodeId nodeId2 = BuilderUtils.newNodeId(hostname2, 0); request2.setNodeId(nodeId2); request2.setHttpPort(0); request2.setResource(capability); @@ -159,9 +155,7 @@ public void testNMExpiry() throws Exception { request3 = recordFactory .newRecordInstance(RegisterNodeManagerRequest.class); - NodeId 
nodeId3 = Records.newRecord(NodeId.class); - nodeId3.setPort(0); - nodeId3.setHost(hostname3); + NodeId nodeId3 = BuilderUtils.newNodeId(hostname3, 0); request3.setNodeId(nodeId3); request3.setHttpPort(0); request3.setResource(capability); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java index 984d7cd..8fab531 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java @@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType; import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; -import org.apache.hadoop.yarn.util.Records; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -91,9 +91,7 @@ public void testRPCResponseId() throws IOException { String node = "localhost"; Resource capability = recordFactory.newRecordInstance(Resource.class); RegisterNodeManagerRequest request = recordFactory.newRecordInstance(RegisterNodeManagerRequest.class); - nodeId = Records.newRecord(NodeId.class); - nodeId.setHost(node); - nodeId.setPort(1234); + nodeId = BuilderUtils.newNodeId(node, 1234); request.setNodeId(nodeId); request.setHttpPort(0); request.setResource(capability); diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java index 62a1b9b..27e63dc 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java @@ -23,24 +23,18 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Priority; -import org.apache.hadoop.yarn.factories.RecordFactory; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Queue; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FSSchedulerApp; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; import org.mockito.Mockito; public class TestFSSchedulerApp { - private static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); - + private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) { - ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class); - ApplicationId appIdImpl = recordFactory.newRecordInstance(ApplicationId.class); - appIdImpl.setId(appId); - attId.setAttemptId(attemptId); - attId.setApplicationId(appIdImpl); - return attId; + ApplicationId appIdImpl = BuilderUtils.newApplicationId(0, appId); + return 
BuilderUtils.newApplicationAttemptId(appIdImpl, attemptId); } @Test diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java index caab5ed..9cb4b1d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java @@ -72,6 +72,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.modes.FifoSchedulingMode; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -132,12 +133,8 @@ private Configuration createConfiguration() { } private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) { - ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class); - ApplicationId appIdImpl = recordFactory.newRecordInstance(ApplicationId.class); - appIdImpl.setId(appId); - attId.setAttemptId(attemptId); - attId.setApplicationId(appIdImpl); - return attId; + ApplicationId appIdImpl = BuilderUtils.newApplicationId(0, appId); + return BuilderUtils.newApplicationAttemptId(appIdImpl, attemptId); } @@ -1377,8 +1374,6 @@ public void testFifoWithinQueue() throws Exception { assertEquals(1, app2.getLiveContainers().size()); } - - 
@SuppressWarnings("unchecked") @Test public void testNotAllowSubmitApplication() throws Exception { // Set acl's @@ -1411,9 +1406,8 @@ public void testNotAllowSubmitApplication() throws Exception { resourceManager.getRMContext().getRMApps().putIfAbsent(applicationId, application); application.handle(new RMAppEvent(applicationId, RMAppEventType.START)); - ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class); - attId.setAttemptId(this.ATTEMPT_ID++); - attId.setApplicationId(applicationId); + ApplicationAttemptId attId = + BuilderUtils.newApplicationAttemptId(applicationId, this.ATTEMPT_ID++); scheduler.addApplication(attId, queue, user); final int MAX_TRIES=20;