diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index 4ef4d8d..6542cb7 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -128,9 +128,7 @@ static ApplicationId applicationId; static { - applicationId = recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); + applicationId = ApplicationId.createApplicationId(0, 0); } public MRApp(int maps, int reduces, boolean autoComplete, String testName, diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java index 0e20d6f..30bd199 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java @@ -789,9 +789,7 @@ public MyAppMaster(Clock clock) { private final Map allJobs; MyAppContext(int numberMaps, int numberReduces) { - myApplicationID = recordFactory.newRecordInstance(ApplicationId.class); - myApplicationID.setClusterTimestamp(clock.getTime()); - myApplicationID.setId(1); + myApplicationID = ApplicationId.createApplicationId(clock.getTime(), 1); myAppAttemptID = recordFactory .newRecordInstance(ApplicationAttemptId.class); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java index 10b79ab..bb48dc7 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java @@ -83,9 +83,7 @@ public void testDeletionofStaging() throws IOException { ApplicationAttemptId attemptId = recordFactory.newRecordInstance( ApplicationAttemptId.class); attemptId.setAttemptId(0); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 0); attemptId.setApplicationId(appId); JobId jobid = recordFactory.newRecordInstance(JobId.class); jobid.setAppId(appId); @@ -111,9 +109,7 @@ public void testNoDeletionofStagingOnReboot() throws IOException { ApplicationAttemptId attemptId = recordFactory.newRecordInstance( ApplicationAttemptId.class); attemptId.setAttemptId(0); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - 
appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 0); attemptId.setApplicationId(appId); ContainerAllocator mockAlloc = mock(ContainerAllocator.class); MRAppMaster appMaster = new TestMRApp(attemptId, mockAlloc, @@ -138,9 +134,7 @@ public void testDeletionofStagingOnReboot() throws IOException { ApplicationAttemptId attemptId = recordFactory.newRecordInstance( ApplicationAttemptId.class); attemptId.setAttemptId(1); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 0); attemptId.setApplicationId(appId); ContainerAllocator mockAlloc = mock(ContainerAllocator.class); MRAppMaster appMaster = new TestMRApp(attemptId, mockAlloc, @@ -166,9 +160,7 @@ public void testDeletionofStagingOnKill() throws IOException { ApplicationAttemptId attemptId = recordFactory.newRecordInstance( ApplicationAttemptId.class); attemptId.setAttemptId(0); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 0); attemptId.setApplicationId(appId); JobId jobid = recordFactory.newRecordInstance(JobId.class); jobid.setAppId(appId); @@ -194,9 +186,7 @@ public void testDeletionofStagingOnKillLastTry() throws IOException { ApplicationAttemptId attemptId = recordFactory.newRecordInstance( ApplicationAttemptId.class); attemptId.setAttemptId(1); - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 0); attemptId.setApplicationId(appId); JobId jobid = recordFactory.newRecordInstance(JobId.class); jobid.setAppId(appId); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java index 9fd0fb8..16d07d6 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java @@ -224,9 +224,7 @@ public void setup() { metrics = mock(MRAppMetrics.class); dataLocations = new String[1]; - appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(1); + appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 1); jobId = Records.newRecord(JobId.class); jobId.setId(1); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java index ff38ff3..5d12aab 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java @@ -76,9 +76,8 @@ public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) { JobId jobId = recordFactory.newRecordInstance(JobId.class); jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setId(id.getId()); - appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier())); + ApplicationId appId = ApplicationId.createApplicationId( + toClusterTimeStamp(id.getJtIdentifier()), id.getId()); jobId.setAppId(appId); return jobId; } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java index e172be5..1a9d8ec 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestShufflePlugin.java @@ -18,29 +18,28 @@ package org.apache.hadoop.mapreduce; -import org.junit.Test; -import static org.junit.Assert.*; -import static org.mockito.Mockito.*; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.fs.LocalDirAllocator; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; + import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.mapred.Task.CombineOutputCollector; +import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.io.compress.CompressionCodec; -import org.apache.hadoop.util.Progress; -import org.apache.hadoop.mapred.Reporter; -import org.apache.hadoop.util.ReflectionUtils; -import org.apache.hadoop.mapreduce.task.reduce.Shuffle; -import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.Counters.Counter; -import org.apache.hadoop.mapred.MapOutputFile; import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.Task; +import org.apache.hadoop.mapred.MapOutputFile; +import org.apache.hadoop.mapred.RawKeyValueIterator; import org.apache.hadoop.mapred.ReduceTask; +import org.apache.hadoop.mapred.Reporter; +import org.apache.hadoop.mapred.ShuffleConsumerPlugin; +import org.apache.hadoop.mapred.Task; +import org.apache.hadoop.mapred.Task.CombineOutputCollector; import org.apache.hadoop.mapred.TaskStatus; import org.apache.hadoop.mapred.TaskUmbilicalProtocol; -import org.apache.hadoop.mapred.ShuffleConsumerPlugin; -import org.apache.hadoop.mapred.RawKeyValueIterator; -import org.apache.hadoop.mapred.Reducer; +import org.apache.hadoop.mapreduce.task.reduce.Shuffle; +import org.apache.hadoop.util.Progress; +import org.apache.hadoop.util.ReflectionUtils; +import org.junit.Test; /** * A JUnit for testing availability and accessibility of shuffle related API. 
@@ -181,10 +180,6 @@ public void testConsumerApi() { * AuxiliaryService(s) which are "Shuffle-Providers" (ShuffleHandler and 3rd party plugins) */ public void testProviderApi() { - - ApplicationId mockApplicationId = mock(ApplicationId.class); - mockApplicationId.setClusterTimestamp(new Long(10)); - mockApplicationId.setId(mock(JobID.class).getId()); LocalDirAllocator mockLocalDirAllocator = mock(LocalDirAllocator.class); JobConf mockJobConf = mock(JobConf.class); try { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java index 601268a..3f16996 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java @@ -140,9 +140,7 @@ public ApplicationSubmissionContext answer(InvocationOnMock invocation) ).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class), any(String.class), any(Credentials.class)); - appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(System.currentTimeMillis()); - appId.setId(1); + appId = ApplicationId.createApplicationId(System.currentTimeMillis(), 1); jobId = TypeConverter.fromYarn(appId); if (testWorkDir.exists()) { FileContext.getLocalFSFileContext().delete(new Path(testWorkDir.toString()), true); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java index 0beb430..1e2db2e 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleHandler.java @@ -60,7 +60,6 @@ import org.apache.hadoop.io.ReadaheadPool; import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.security.SecureShuffleUtils; -import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier; import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager; import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader; @@ -71,6 +70,7 @@ import org.apache.hadoop.metrics2.lib.MutableCounterInt; import org.apache.hadoop.metrics2.lib.MutableCounterLong; import org.apache.hadoop.metrics2.lib.MutableGaugeInt; +import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -78,7 +78,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.util.ConverterUtils; -import org.apache.hadoop.yarn.util.Records; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; @@ -542,9 +541,8 @@ protected ChannelFuture 
sendMapOutput(ChannelHandlerContext ctx, Channel ch, // $x/$user/appcache/$appId/output/$mapId // TODO: Once Shuffle is out of NM, this can use MR APIs to convert between App and Job JobID jobID = JobID.forName(jobId); - ApplicationId appID = Records.newRecord(ApplicationId.class); - appID.setClusterTimestamp(Long.parseLong(jobID.getJtIdentifier())); - appID.setId(jobID.getId()); + ApplicationId appID = ApplicationId.createApplicationId( + Long.parseLong(jobID.getJtIdentifier()), jobID.getId()); final String base = ContainerLocalizer.USERCACHE + "/" + user + "/" + ContainerLocalizer.APPCACHE + "/" diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java index ef1f509..21e9e2e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationId.java @@ -72,7 +72,7 @@ */ protected abstract void build(); - public ApplicationId createApplicationId(long clusterTimestamp, int id) { + public static ApplicationId createApplicationId(long clusterTimestamp, int id) { ApplicationId appId = Records.newRecord(ApplicationId.class); appId.setClusterTimestamp(clusterTimestamp); appId.setId(id); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java index ccfc8d9..c45f78f 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java @@ -36,7 +36,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; -import org.apache.hadoop.yarn.util.Records; import org.junit.Test; public class TestYarnClient { @@ -80,9 +79,8 @@ public void testSubmitApplication() { for (int i = 0; i < exitStates.length; ++i) { ApplicationSubmissionContext context = mock(ApplicationSubmissionContext.class); - ApplicationId applicationId = Records.newRecord(ApplicationId.class); - applicationId.setClusterTimestamp(System.currentTimeMillis()); - applicationId.setId(i); + ApplicationId applicationId = ApplicationId.createApplicationId( + System.currentTimeMillis(), i); when(context.getApplicationId()).thenReturn(applicationId); ((MockYarnClient) client).setYarnApplicationState(exitStates[i]); try { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java index b3baff7..f5c9fff 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java @@ -18,15 +18,16 @@ package org.apache.hadoop.yarn.util; +import static org.apache.hadoop.yarn.util.StringHelper._split; +import static org.apache.hadoop.yarn.util.StringHelper.join; +import static org.apache.hadoop.yarn.util.StringHelper.sjoin; + 
import java.util.Iterator; import java.util.Map; import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; - -import static org.apache.hadoop.yarn.util.StringHelper.*; /** * Yarn application related utilities @@ -45,10 +46,9 @@ public static ApplicationId toAppID(String prefix, String s, Iterator it throwParseException(sjoin(prefix, ID), s); } shouldHaveNext(prefix, s, it); - ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - shouldHaveNext(prefix, s, it); - appId.setId(Integer.parseInt(it.next())); + long clusterTimestamp = Long.parseLong(it.next()); + shouldHaveNext(prefix, s, it); + ApplicationId appId = ApplicationId.createApplicationId(clusterTimestamp, Integer.parseInt(it.next())); return appId; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java index 7dc25de..b990cb1 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java @@ -126,30 +126,20 @@ public static LocalResource newLocalResource(URI uri, visibility, size, timestamp); } + // TODO Consider getting rid of these in favour of using ApplicationId.createApplicationId everywhere. public static ApplicationId newApplicationId(RecordFactory recordFactory, long clustertimestamp, CharSequence id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(Integer.valueOf(id.toString())); - applicationId.setClusterTimestamp(clustertimestamp); - return applicationId; + return ApplicationId.createApplicationId(clustertimestamp, + Integer.valueOf(id.toString())); } public static ApplicationId newApplicationId(RecordFactory recordFactory, long clusterTimeStamp, int id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(id); - applicationId.setClusterTimestamp(clusterTimeStamp); - return applicationId; + return ApplicationId.createApplicationId(clusterTimeStamp, id); } public static ApplicationId newApplicationId(long clusterTimeStamp, int id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(id); - applicationId.setClusterTimestamp(clusterTimeStamp); - return applicationId; + return ApplicationId.createApplicationId(clusterTimeStamp, id); } public static ApplicationAttemptId newApplicationAttemptId( @@ -162,11 +152,8 @@ public static ApplicationAttemptId newApplicationAttemptId( } public static ApplicationId convert(long clustertimestamp, CharSequence id) { - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setId(Integer.valueOf(id.toString())); - applicationId.setClusterTimestamp(clustertimestamp); - return applicationId; + return ApplicationId.createApplicationId(clustertimestamp, + Integer.valueOf(id.toString())); } public static ContainerId newContainerId(ApplicationAttemptId appAttemptId, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java index 21fe2d9..3f9dded 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java @@ -114,18 +114,14 @@ public static ApplicationId toApplicationId(RecordFactory recordFactory, private static ApplicationId toApplicationId(RecordFactory recordFactory, Iterator it) { - ApplicationId appId = - recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - appId.setId(Integer.parseInt(it.next())); - return appId; + return ApplicationId.createApplicationId(Long.parseLong(it.next()), + Integer.parseInt(it.next())); } private static ApplicationAttemptId toApplicationAttemptId( Iterator it) throws NumberFormatException { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - appId.setId(Integer.parseInt(it.next())); + ApplicationId appId = ApplicationId.createApplicationId(Long.parseLong(it.next()), + Integer.parseInt(it.next())); ApplicationAttemptId appAttemptId = Records .newRecord(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); @@ -135,10 +131,8 @@ private static ApplicationAttemptId toApplicationAttemptId( private static ApplicationId toApplicationId( Iterator it) throws NumberFormatException { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(Long.parseLong(it.next())); - appId.setId(Integer.parseInt(it.next())); - return appId; + return ApplicationId.createApplicationId(Long.parseLong(it.next()), + Integer.parseInt(it.next())); } public static String toString(ContainerId cId) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java index cc67ff7..f07ac67 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java @@ -62,10 +62,7 @@ public static String newQueue() { } public static ApplicationId newAppID(int i) { - ApplicationId id = Records.newRecord(ApplicationId.class); - id.setClusterTimestamp(TS); - id.setId(i); - return id; + return ApplicationId.createApplicationId(TS, i); } public static ApplicationAttemptId newAppAttemptID(ApplicationId appId, int i) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java index 295a38c..225760b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java @@ -93,12 +93,9 @@ private void testRPCTimeout(String rpcClass) throws Exception { containerLaunchContext.setUser("dummy-user"); ContainerId containerId = recordFactory .newRecordInstance(ContainerId.class); - ApplicationId applicationId = recordFactory - .newRecordInstance(ApplicationId.class); + ApplicationId applicationId = 
ApplicationId.createApplicationId(0, 0); ApplicationAttemptId applicationAttemptId = recordFactory .newRecordInstance(ApplicationAttemptId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); applicationAttemptId.setApplicationId(applicationId); applicationAttemptId.setAttemptId(0); containerId.setApplicationAttemptId(applicationAttemptId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java index 7d941e9..6a0ec81 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java @@ -116,12 +116,9 @@ private void test(String rpcClass) throws Exception { containerLaunchContext.setUser("dummy-user"); ContainerId containerId = recordFactory.newRecordInstance(ContainerId.class); - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); + ApplicationId applicationId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId applicationAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); applicationAttemptId.setApplicationId(applicationId); applicationAttemptId.setAttemptId(0); containerId.setApplicationAttemptId(applicationAttemptId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java index 764ab42..32fd974 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationAttemptId.java @@ -61,9 +61,7 @@ private ApplicationAttemptId createAppAttemptId(long clusterTimeStamp, int id, int attemptId) { ApplicationAttemptId appAttemptId = Records.newRecord(ApplicationAttemptId.class); - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(clusterTimeStamp); - appId.setId(id); + ApplicationId appId = ApplicationId.createApplicationId(clusterTimeStamp, id); appAttemptId.setApplicationId(appId); appAttemptId.setAttemptId(attemptId); return appAttemptId; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java index a7d701a..d5863b7 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestApplicationId.java @@ -21,7 +21,6 @@ import junit.framework.Assert; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.util.Records; import org.junit.Test; public class TestApplicationId { @@ -52,9 +51,6 @@ public void testApplicationId() { } private ApplicationId createAppId(long clusterTimeStamp, int id) { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(clusterTimeStamp); - appId.setId(id); - return appId; + return
ApplicationId.createApplicationId(clusterTimeStamp, id); } } \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java index ac3a2a0..efd9b62 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestContainerId.java @@ -71,10 +71,7 @@ private ContainerId createContainerId(long clusterTimestamp, int appIdInt, } private ApplicationId createAppId(long clusterTimeStamp, int id) { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(clusterTimeStamp); - appId.setId(id); - return appId; + return ApplicationId.createApplicationId(clusterTimeStamp, id); } private ApplicationAttemptId createAppAttemptId(ApplicationId appId, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java index 292d00f..4b56e73 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java @@ -111,10 +111,7 @@ protected void startStatusUpdater() { ContainerLaunchContext launchContext = recordFactory.newRecordInstance(ContainerLaunchContext.class); ContainerId cID = recordFactory.newRecordInstance(ContainerId.class); - ApplicationId applicationId = - recordFactory.newRecordInstance(ApplicationId.class); - applicationId.setClusterTimestamp(0); - applicationId.setId(0); + ApplicationId applicationId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId applicationAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); applicationAttemptId.setApplicationId(applicationId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index 1436193..8db1d28 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -222,9 +222,7 @@ private void createFiles(String dir, String subDir, int numOfFiles) { } private ContainerId createContainerId() { - ApplicationId appId = Records.newRecord(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId appAttemptId = Records.newRecord(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index 1792988..0a3d155 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -24,10 +24,8 @@ import java.io.BufferedReader; import java.io.File; import java.io.FileReader; -import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; -import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -62,7 +60,6 @@ import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.util.BuilderUtils; @@ -219,9 +216,7 @@ private void startContainers(NodeManager nm) throws IOException { } private ContainerId createContainerId() { - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId appAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java index 29d6a4c..6291d68 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java @@ -138,8 +138,6 @@ public RegisterNodeManagerResponse registerNodeManager( return response; } - ApplicationId applicationID = recordFactory - .newRecordInstance(ApplicationId.class); ApplicationAttemptId appAttemptID = recordFactory .newRecordInstance(ApplicationAttemptId.class); ContainerId firstContainerID = recordFactory @@ -178,12 +178,15 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) getAppToContainerStatusMap(nodeStatus.getContainersStatuses()); org.apache.hadoop.yarn.api.records.Container mockContainer = mock(org.apache.hadoop.yarn.api.records.Container.class); + + ApplicationId appId1 = ApplicationId.createApplicationId(0, 1); + ApplicationId appId2 = ApplicationId.createApplicationId(0, 2); + if (heartBeatID == 1) { Assert.assertEquals(0,
nodeStatus.getContainersStatuses().size()); // Give a container to the NM. - applicationID.setId(heartBeatID); - appAttemptID.setApplicationId(applicationID); + appAttemptID.setApplicationId(appId1); firstContainerID.setApplicationAttemptId(appAttemptID); firstContainerID.setId(heartBeatID); ContainerLaunchContext launchContext = recordFactory @@ -200,7 +203,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) Assert.assertEquals("Number of applications should only be one!", 1, nodeStatus.getContainersStatuses().size()); Assert.assertEquals("Number of container for the app should be one!", - 1, appToContainers.get(applicationID).size()); + 1, appToContainers.get(appId1).size()); // Checks on the NM end ConcurrentMap activeContainers = @@ -208,8 +211,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) Assert.assertEquals(1, activeContainers.size()); // Give another container to the NM. - applicationID.setId(heartBeatID); - appAttemptID.setApplicationId(applicationID); + appAttemptID.setApplicationId(appId2); secondContainerID.setApplicationAttemptId(appAttemptID); secondContainerID.setId(heartBeatID); ContainerLaunchContext launchContext = recordFactory @@ -226,7 +228,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) Assert.assertEquals("Number of applications should only be one!", 1, appToContainers.size()); Assert.assertEquals("Number of container for the app should be two!", - 2, appToContainers.get(applicationID).size()); + 2, appToContainers.get(appId2).size()); // Checks on the NM end ConcurrentMap activeContainers = diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java index 46c9faa..a1600a5 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java @@ -18,8 +18,12 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager; -import org.junit.Test; -import static org.junit.Assert.*; +import static org.apache.hadoop.yarn.service.Service.STATE.INITED; +import static org.apache.hadoop.yarn.service.Service.STATE.STARTED; +import static org.apache.hadoop.yarn.service.Service.STATE.STOPPED; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -30,17 +34,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices; -import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent; -import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType; import 
org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.Service; - - -import static org.apache.hadoop.yarn.service.Service.STATE.*; +import org.junit.Test; public class TestAuxServices { private static final Log LOG = LogFactory.getLog(TestAuxServices.class); @@ -123,18 +120,18 @@ public void testAuxEventDispatch() { aux.init(conf); aux.start(); - ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - appId.setId(65); + ApplicationId appId1 = ApplicationId.createApplicationId(0, 65); ByteBuffer buf = ByteBuffer.allocate(6); buf.putChar('A'); buf.putInt(65); buf.flip(); AuxServicesEvent event = new AuxServicesEvent( - AuxServicesEventType.APPLICATION_INIT, "user0", appId, "Asrv", buf); + AuxServicesEventType.APPLICATION_INIT, "user0", appId1, "Asrv", buf); aux.handle(event); - appId.setId(66); + + ApplicationId appId2 = ApplicationId.createApplicationId(0, 66); event = new AuxServicesEvent( - AuxServicesEventType.APPLICATION_STOP, "user0", appId, "Bsrv", null); + AuxServicesEventType.APPLICATION_STOP, "user0", appId2, "Bsrv", null); // verify all services got the stop event aux.handle(event); Collection servs = aux.getServices(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 981ab39..e8c00d7 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -18,12 +18,14 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; -import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -53,7 +55,6 @@ import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.server.nodemanager.CMgrCompletedAppsEvent; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; -import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState; @@ -63,7 +64,6 @@ import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; import org.junit.Test; -import static org.mockito.Mockito.*; public class TestContainerManager extends BaseContainerManagerTest { @@ -76,9 +76,7 @@ public TestContainerManager() throws UnsupportedFileSystemException { } private ContainerId createContainerId() { - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId 
appAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java index 0e07efb..f938710 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java @@ -18,7 +18,9 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import java.io.BufferedReader; import java.io.File; @@ -26,14 +28,14 @@ import java.io.FileReader; import java.io.IOException; import java.io.PrintWriter; -import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import junit.framework.Assert; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.util.Shell; @@ -57,7 +59,6 @@ import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest; -import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -65,9 +66,6 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin; import org.junit.Before; import org.junit.Test; -import static org.mockito.Mockito.*; - -import junit.framework.Assert; public class TestContainerLaunch extends BaseContainerManagerTest { @@ -164,9 +162,7 @@ public void testContainerEnvVariables() throws Exception { Container mockContainer = mock(Container.class); // ////// Construct the Container-id - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId appAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); @@ -334,9 +330,7 @@ public void testDelayedKill() throws Exception { Container mockContainer = mock(Container.class); // ////// Construct the Container-id - ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(1); - appId.setId(1); + ApplicationId appId = ApplicationId.createApplicationId(1, 1); ApplicationAttemptId appAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index ccbf9f7..f58dc6c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -18,9 +18,19 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation; -import static org.mockito.Mockito.*; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyMap; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; @@ -47,15 +57,13 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.UnsupportedFileSystemException; -import org.apache.hadoop.io.DataInputBuffer; -import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.YarnException; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; +import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; @@ -71,9 +79,9 @@ import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat; -import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader; +import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy; import org.apache.hadoop.yarn.server.nodemanager.CMgrCompletedAppsEvent; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; @@ -682,10 +690,7 @@ public void testLogAggregationForRealContainerLaunch() throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); Container mockContainer = mock(Container.class); // ////// Construct the Container-id - ApplicationId appId = - 
recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(appId, 1); ContainerId cId = BuilderUtils.newContainerId(appAttemptId, 0); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index ad4a818..5a196e4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -200,10 +200,7 @@ public void testContainerKillOnMemoryOverflow() throws IOException, recordFactory.newRecordInstance(ContainerLaunchContext.class); Container mockContainer = mock(Container.class); // ////// Construct the Container-id - ApplicationId appId = - recordFactory.newRecordInstance(ApplicationId.class); - appId.setClusterTimestamp(0); - appId.setId(0); + ApplicationId appId = ApplicationId.createApplicationId(0, 0); ApplicationAttemptId appAttemptId = recordFactory.newRecordInstance(ApplicationAttemptId.class); appAttemptId.setApplicationId(appId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java index aa7af9c..3868614 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java @@ -414,11 +414,7 @@ private void mockRMContext(YarnScheduler yarnScheduler, RMContext rmContext) } private ApplicationId getApplicationId(int id) { - ApplicationId applicationId = recordFactory - .newRecordInstance(ApplicationId.class); - applicationId.setClusterTimestamp(123456); - applicationId.setId(id); - return applicationId; + return ApplicationId.createApplicationId(123456, id); } private RMAppImpl getRMApp(RMContext rmContext, YarnScheduler yarnScheduler, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java index 66f9059..3e5e498 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.when; import java.io.IOException; import java.util.Comparator; @@ -30,6 +31,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.net.NetworkTopology; +import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.QueueInfo; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; @@ -50,13 +52,10 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM; import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; -import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; -import static org.mockito.Mockito.*; public class TestCapacityScheduler { @@ -467,15 +466,9 @@ public void testApplicationComparator() { CapacityScheduler cs = new CapacityScheduler(); Comparator appComparator= cs.getApplicationComparator(); - ApplicationId id1 = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - id1.setClusterTimestamp(1); - id1.setId(1); - ApplicationId id2 = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - id2.setClusterTimestamp(1); - id2.setId(2); - ApplicationId id3 = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class); - id3.setClusterTimestamp(2); - id3.setId(1); + ApplicationId id1 = ApplicationId.createApplicationId(1, 1); + ApplicationId id2 = ApplicationId.createApplicationId(1, 2); + ApplicationId id3 = ApplicationId.createApplicationId(2, 1); //same clusterId FiCaSchedulerApp app1 = Mockito.mock(FiCaSchedulerApp.class); when(app1.getApplicationId()).thenReturn(id1); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java index 62a1b9b..0babf6b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFSSchedulerApp.java @@ -36,8 +36,7 @@ private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) { ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class); - ApplicationId appIdImpl = recordFactory.newRecordInstance(ApplicationId.class); - appIdImpl.setId(appId); + ApplicationId appIdImpl = ApplicationId.createApplicationId(0, appId); attId.setAttemptId(attemptId); attId.setApplicationId(appIdImpl); return attId; 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java index e6761b9..dd026fd 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestFairScheduler.java @@ -138,8 +138,7 @@ private Configuration createConfiguration() { private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) { ApplicationAttemptId attId = recordFactory.newRecordInstance(ApplicationAttemptId.class); - ApplicationId appIdImpl = recordFactory.newRecordInstance(ApplicationId.class); - appIdImpl.setId(appId); + ApplicationId appIdImpl = ApplicationId.createApplicationId(0, appId); attId.setAttemptId(attemptId); attId.setApplicationId(appIdImpl); return attId; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java index 85076ba..6813cf7 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java @@ -94,9 +94,7 @@ public void tearDown() throws Exception { private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) { ApplicationAttemptId attId = recordFactory .newRecordInstance(ApplicationAttemptId.class); - ApplicationId appIdImpl = recordFactory - .newRecordInstance(ApplicationId.class); - appIdImpl.setId(appId); + ApplicationId appIdImpl = ApplicationId.createApplicationId(0, appId); attId.setAttemptId(attemptId); attId.setApplicationId(appIdImpl); return attId;
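Note: the ApplicationId.java hunk above turns createApplicationId into a static factory, and every converted call site in this patch follows the same pattern. A minimal, illustrative sketch of that pattern is shown below; the class name ApplicationIdFactoryExample is hypothetical, and running it requires the YARN record machinery behind Records.newRecord to be on the classpath.

import org.apache.hadoop.yarn.api.records.ApplicationId;

public class ApplicationIdFactoryExample {
  public static void main(String[] args) {
    // Old pattern removed throughout this patch: create the record, then mutate it.
    //   ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
    //   appId.setClusterTimestamp(System.currentTimeMillis());
    //   appId.setId(1);

    // New pattern: one static call returns a fully populated ApplicationId.
    ApplicationId appId =
        ApplicationId.createApplicationId(System.currentTimeMillis(), 1);

    // The record keeps the same accessors as before.
    System.out.println(appId.getClusterTimestamp() + " / " + appId.getId());
  }
}

Building the record in a single call avoids the half-initialized, mutable-record usage that the removed setClusterTimestamp/setId sequences relied on, which is what caused tests like TestNodeStatusUpdater and TestAuxServices to share one mutated ApplicationId across assertions.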