diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
index d71aff5..0f5563e 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
@@ -718,8 +718,10 @@ public long getTaskTrackerExpiryInterval() throws IOException, InterruptedExcept
     return TaskCompletionEvent.EMPTY_ARRAY;
   }
 
-  public org.apache.hadoop.mapreduce.JobStatus[] getAllJobs() {return null;}
-
+  public org.apache.hadoop.mapreduce.JobStatus[] getAllJobs(
+      String applicationType) {
+    return null;
+  }
 
   /**
    * Returns the diagnostic information for a particular task in the given job.
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
index 0393130..522b3d9 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
@@ -781,9 +781,9 @@ public ClusterStatus run() throws IOException, InterruptedException {
    * @return array of {@link JobStatus} for the running/to-be-run jobs.
    * @throws IOException
    */
-  public JobStatus[] jobsToComplete() throws IOException {
+  public JobStatus[] jobsToComplete(String applicationType) throws IOException {
     List<JobStatus> stats = new ArrayList<JobStatus>();
-    for (JobStatus stat : getAllJobs()) {
+    for (JobStatus stat : getAllJobs(applicationType)) {
       if (!stat.isJobComplete()) {
         stats.add(stat);
       }
@@ -797,14 +797,14 @@ public ClusterStatus run() throws IOException, InterruptedException {
    * @return array of {@link JobStatus} for the submitted jobs.
    * @throws IOException
    */
-  public JobStatus[] getAllJobs() throws IOException {
+  public JobStatus[] getAllJobs(final String applicationType) throws IOException {
     try {
       org.apache.hadoop.mapreduce.JobStatus[] jobs =
           clientUgi.doAs(new PrivilegedExceptionAction<
              org.apache.hadoop.mapreduce.JobStatus[]> () {
            public org.apache.hadoop.mapreduce.JobStatus[] run()
                throws IOException, InterruptedException {
-             return cluster.getAllJobStatuses();
+             return cluster.getAllJobStatuses(applicationType);
            }
          });
       JobStatus[] stats = new JobStatus[jobs.length];
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
index e93f273..eb19a8a 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
@@ -267,8 +267,8 @@ public ClusterMetrics getClusterStatus() throws IOException, InterruptedExceptio
    * @deprecated Use {@link #getAllJobStatuses()} instead.
    */
   @Deprecated
-  public Job[] getAllJobs() throws IOException, InterruptedException {
-    return getJobs(client.getAllJobs());
+  public Job[] getAllJobs(String applicationType) throws IOException, InterruptedException {
+    return getJobs(client.getAllJobs(applicationType));
   }
 
   /**
@@ -277,8 +277,9 @@ public ClusterMetrics getClusterStatus() throws IOException, InterruptedExceptio
    * @throws IOException
    * @throws InterruptedException
    */
-  public JobStatus[] getAllJobStatuses() throws IOException, InterruptedException {
-    return client.getAllJobs();
+  public JobStatus[] getAllJobStatuses(String applicationType)
+      throws IOException, InterruptedException {
+    return client.getAllJobs(applicationType);
   }
 
   /**
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java
index ad58807..7902879 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/protocol/ClientProtocol.java
@@ -219,7 +219,8 @@ public Counters getJobCounters(JobID jobid)
    * Get all the jobs submitted.
    * @return array of JobStatus for the submitted jobs
    */
-  public JobStatus[] getAllJobs() throws IOException, InterruptedException;
+  public JobStatus[] getAllJobs(String applicationType) throws IOException,
+      InterruptedException;
 
   /**
    * Get task completion events for the jobid, starting from fromEventId.
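With the protocol change above, every layer of the MapReduce client API (`ClientProtocol`, `Cluster`, `JobClient`) now takes an application-type filter when listing jobs. A minimal caller-side sketch of the changed `Cluster` API, assuming a cluster reachable through the default configuration; the "MAPREDUCE" filter value is only illustrative, and "ALL" (the constant added to `YarnConfiguration` later in this patch) disables filtering:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.JobStatus;

public class ListMapReduceJobs {
  public static void main(String[] args) throws Exception {
    // Connect to the cluster described by the default configuration files.
    Cluster cluster = new Cluster(new Configuration());
    try {
      // "MAPREDUCE" is an assumed example type; pass "ALL"
      // (YarnConfiguration.ALL_APPLICATION_TYPE) to list every type.
      for (JobStatus status : cluster.getAllJobStatuses("MAPREDUCE")) {
        System.out.println(status.getJobID() + "\t" + status.getState());
      }
    } finally {
      cluster.close();
    }
  }
}
```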
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 0d6a68a..5dec135 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.logaggregation.LogDumper;
 
 import com.google.common.base.Charsets;
@@ -95,6 +96,7 @@ public int run(String[] argv) throws Exception {
     JobPriority jp = null;
     String taskType = null;
     String taskState = null;
+    String applicationType = null;
     int fromEvent = 0;
     int nEvents = 0;
     boolean getStatus = false;
@@ -178,14 +180,15 @@ public int run(String[] argv) throws Exception {
         historyFile = argv[1];
       }
     } else if ("-list".equals(cmd)) {
-      if (argv.length != 1 && !(argv.length == 2 && "all".equals(argv[1]))) {
+      if (argv.length != 1 && argv.length != 2) {
        displayUsage(cmd);
        return exitCode;
       }
-      if (argv.length == 2 && "all".equals(argv[1])) {
+      if ((argv.length == 2 && "all".equals(argv[1])) || argv.length == 1) {
        listAllJobs = true;
       } else {
        listJobs = true;
+        applicationType = argv[1];
       }
     } else if("-kill-task".equals(cmd)) {
       if (argv.length != 2) {
@@ -311,10 +314,10 @@ public int run(String[] argv) throws Exception {
       listEvents(cluster.getJob(JobID.forName(jobid)), fromEvent, nEvents);
       exitCode = 0;
     } else if (listJobs) {
-      listJobs(cluster);
+      listJobs(cluster, applicationType);
       exitCode = 0;
     } else if (listAllJobs) {
-      listAllJobs(cluster);
+      listAllJobs(cluster, YarnConfiguration.ALL_APPLICATION_TYPE);
       exitCode = 0;
     } else if (listActiveTrackers) {
       listActiveTrackers(cluster);
@@ -416,7 +419,8 @@ private void displayUsage(String cmd) {
     } else if ("-history".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + " <jobHistoryFile>]");
     } else if ("-list".equals(cmd)) {
-      System.err.println(prefix + "[" + cmd + " <ApplicationType>]. " +
+      System.err.println(prefix + "[" + cmd + " <ApplicationType>]. " +
+          " <ApplicationType> is optional to list applications");
     } else if ("-kill-task".equals(cmd) || "-fail-task".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + " <task-attempt-id>]");
     } else if ("-set-priority".equals(cmd)) {
@@ -500,10 +504,10 @@ protected static String getTaskLogURL(TaskAttemptID taskId, String baseUrl) {
    * Dump a list of currently running jobs
    * @throws IOException
    */
-  private void listJobs(Cluster cluster)
+  private void listJobs(Cluster cluster, String applicationType)
       throws IOException, InterruptedException {
     List<JobStatus> runningJobs = new ArrayList<JobStatus>();
-    for (JobStatus job : cluster.getAllJobStatuses()) {
+    for (JobStatus job : cluster.getAllJobStatuses(applicationType)) {
       if (!job.isJobComplete()) {
         runningJobs.add(job);
       }
@@ -515,9 +519,9 @@ private void listJobs(Cluster cluster)
    * Dump a list of all jobs submitted.
    * @throws IOException
    */
-  private void listAllJobs(Cluster cluster)
+  private void listAllJobs(Cluster cluster, String applicationType)
       throws IOException, InterruptedException {
-    displayJobList(cluster.getAllJobStatuses());
+    displayJobList(cluster.getAllJobStatuses(applicationType));
   }
 
   /**
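After this change, `mapred job -list` with no argument behaves like `-list all` and shows every application type, while `-list <ApplicationType>` restricts the output to incomplete jobs of that type. A hedged sketch of driving the updated CLI programmatically; the "MAPREDUCE" argument is an assumed example value:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class ListJobsByType {
  public static void main(String[] args) throws Exception {
    // Equivalent to "mapred job -list MAPREDUCE"; with the patch this reaches
    // Cluster#getAllJobStatuses("MAPREDUCE") and prints only incomplete jobs
    // of that type. Running with just "-list" lists all application types.
    int exitCode = ToolRunner.run(new Configuration(), new CLI(),
        new String[] { "-list", "MAPREDUCE" });
    System.exit(exitCode);
  }
}
```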
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
index dee5d18..e3b7213 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java
@@ -73,9 +73,11 @@ public ResourceMgrDelegate(YarnConfiguration conf) {
     }
   }
 
-  public JobStatus[] getAllJobs() throws IOException, InterruptedException {
+  public JobStatus[] getAllJobs(String applicationType) throws IOException,
+      InterruptedException {
     try {
-      return TypeConverter.fromYarnApps(super.getApplicationList(), this.conf);
+      return TypeConverter.fromYarnApps(
+          super.getApplicationList(applicationType), this.conf);
     } catch (YarnException e) {
       throw new IOException(e);
     }
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
index 9814ec9..2586583 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
@@ -162,8 +162,9 @@ public void cancelDelegationToken(Token arg0)
   }
 
   @Override
-  public JobStatus[] getAllJobs() throws IOException, InterruptedException {
-    return resMgrDelegate.getAllJobs();
+  public JobStatus[] getAllJobs(String applicationType) throws IOException,
+      InterruptedException {
+    return resMgrDelegate.getAllJobs(applicationType);
   }
 
   @Override
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
index 9d7626d..fb30716 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.net.StandardSocketFactory;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -93,7 +94,8 @@ public void testSocketFactory() throws IOException {
           + (Integer.parseInt(split[1]) + 10));
 
       client = new JobClient(jconf);
-      JobStatus[] jobs = client.jobsToComplete();
+      JobStatus[] jobs =
+          client.jobsToComplete(YarnConfiguration.ALL_APPLICATION_TYPE);
       Assert.assertTrue(jobs.length == 0);
 
     } finally {
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
index e6e12eb..ecffbc2 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 /**
  * This class tests reliability of the framework in the face of failures of
@@ -216,8 +217,9 @@ public void run() {
     t.start();
     JobStatus[] jobs;
     //get the job ID. This is the job that we just submitted
-    while ((jobs = jc.jobsToComplete()).length == 0) {
-      LOG.info("Waiting for the job " + jobClass +" to start");
+    while ((jobs = jc.jobsToComplete(YarnConfiguration.ALL_APPLICATION_TYPE))
+        .length == 0) {
+      LOG.info("Waiting for the job " + jobClass + " to start");
       Thread.sleep(1000);
     }
     JobID jobId = jobs[jobs.length - 1].getJobID();
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
index 017b993..08d601c 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
@@ -315,7 +315,8 @@ public GetClusterMetricsResponse getClusterMetrics(
 
     @Override
     public GetAllApplicationsResponse getAllApplications(
-        GetAllApplicationsRequest request) throws IOException {
+        GetAllApplicationsRequest request, String applicationType)
+        throws IOException {
       return null;
     }
 
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
index 57b09c5..aad67d8 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java
@@ -105,8 +105,8 @@ public void tesAllJobs() throws Exception {
     allApplicationsResponse.setApplicationList(applications);
     Mockito.when(
         applicationsManager.getAllApplications(Mockito
-            .any(GetAllApplicationsRequest.class))).thenReturn(
-        allApplicationsResponse);
+            .any(GetAllApplicationsRequest.class), Mockito.anyString()))
+        .thenReturn(allApplicationsResponse);
     ResourceMgrDelegate resourceMgrDelegate = new ResourceMgrDelegate(
       new YarnConfiguration()) {
       @Override
@@ -114,7 +114,8 @@ protected void serviceStart() {
         this.rmClient = applicationsManager;
       }
     };
-    JobStatus[] allJobs = resourceMgrDelegate.getAllJobs();
+    JobStatus[] allJobs =
+        resourceMgrDelegate.getAllJobs(YarnConfiguration.ALL_APPLICATION_TYPE);
     Assert.assertEquals(State.FAILED, allJobs[0].getState());
     Assert.assertEquals(State.SUCCEEDED, allJobs[1].getState());
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index e25f3cd..73a7deb 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -94,6 +94,7 @@ import org.apache.log4j.WriterAppender;
 import org.junit.Before;
 import org.junit.Test;
+import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
@@ -211,10 +212,10 @@ protected void serviceStart() {
     verify(clientRMProtocol).forceKillApplication(any(KillApplicationRequest.class));
 
     /* make sure getalljobs calls get all applications */
-    when(clientRMProtocol.getAllApplications(any(GetAllApplicationsRequest.class))).
+    when(clientRMProtocol.getAllApplications(any(GetAllApplicationsRequest.class), Mockito.anyString())).
       thenReturn(recordFactory.newRecordInstance(GetAllApplicationsResponse.class));
-    delegate.getAllJobs();
-    verify(clientRMProtocol).getAllApplications(any(GetAllApplicationsRequest.class));
+    delegate.getAllJobs(YarnConfiguration.ALL_APPLICATION_TYPE);
+    verify(clientRMProtocol).getAllApplications(any(GetAllApplicationsRequest.class), Mockito.anyString());
 
     /* make sure getapplication report is called */
     when(clientRMProtocol.getApplicationReport(any(GetApplicationReportRequest.class)))
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
index 972391c..56d21fe 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 import org.apache.commons.logging.Log;
 
@@ -221,7 +222,8 @@ public static String getSpace(int len) {
    * Gets job status from the jobtracker given the jobclient and the job id
    */
  static JobStatus getJobStatus(JobClient jc, JobID id) throws IOException {
-    JobStatus[] statuses = jc.getAllJobs();
+    JobStatus[] statuses =
+        jc.getAllJobs(YarnConfiguration.ALL_APPLICATION_TYPE);
     for (JobStatus jobStatus : statuses) {
       if (jobStatus.getJobID().equals(id)) {
         return jobStatus;
@@ -264,7 +266,8 @@ static void waitTillDone(JobClient jobClient) throws IOException {
     // Wait for the last job to complete
     while (true) {
       boolean shouldWait = false;
-      for (JobStatus jobStatuses : jobClient.getAllJobs()) {
+      for (JobStatus jobStatuses : jobClient
+          .getAllJobs(YarnConfiguration.ALL_APPLICATION_TYPE)) {
         if (jobStatuses.getRunState() != JobStatus.SUCCEEDED &&
             jobStatuses.getRunState() != JobStatus.FAILED &&
             jobStatuses.getRunState() != JobStatus.KILLED) {
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java
index 593a8ea..8253586 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java
@@ -207,12 +207,13 @@ public GetClusterMetricsResponse getClusterMetrics(
    *
    *
    * @param request request for report on all running applications
+   * @param applicationType application type of the applications to report on
    * @return report on all running applications
    * @throws YarnException
    * @throws IOException
    */
   public GetAllApplicationsResponse getAllApplications(
-      GetAllApplicationsRequest request)
+      GetAllApplicationsRequest request, String applicationType)
       throws YarnException, IOException;
 
   /**
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 3ef06e0..ef44b6f 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -256,6 +256,9 @@
   /** Default application type */
   public static final String DEFAULT_APPLICATION_TYPE = "YARN";
 
+  /** All application types */
+  public static final String ALL_APPLICATION_TYPE = "ALL";
+
   /** Default application type length */
   public static final int APPLICATION_TYPE_LENGTH = 20;
 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto
index 5aa2380..1182b7b 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/client_RM_protocol.proto
@@ -24,13 +24,18 @@ option java_generate_equals_and_hash = true;
 import "Security.proto";
 import "yarn_service_protos.proto";
 
+message AllApplicationsRequestProto {
+  optional GetAllApplicationsRequestProto allApplicationsRequest = 1;
+  optional string applicationType = 2;
+}
+
 service ClientRMProtocolService {
   rpc getNewApplication (GetNewApplicationRequestProto) returns (GetNewApplicationResponseProto);
   rpc getApplicationReport (GetApplicationReportRequestProto) returns (GetApplicationReportResponseProto);
   rpc submitApplication (SubmitApplicationRequestProto) returns (SubmitApplicationResponseProto);
   rpc forceKillApplication (KillApplicationRequestProto) returns (KillApplicationResponseProto);
   rpc getClusterMetrics (GetClusterMetricsRequestProto) returns (GetClusterMetricsResponseProto);
-  rpc getAllApplications (GetAllApplicationsRequestProto) returns (GetAllApplicationsResponseProto);
+  rpc getAllApplications (AllApplicationsRequestProto) returns (GetAllApplicationsResponseProto);
   rpc getClusterNodes (GetClusterNodesRequestProto) returns (GetClusterNodesResponseProto);
   rpc getQueueInfo (GetQueueInfoRequestProto) returns (GetQueueInfoResponseProto);
   rpc getQueueUserAcls (GetQueueUserAclsInfoRequestProto) returns (GetQueueUserAclsInfoResponseProto);
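On the wire, the RPC no longer sends `GetAllApplicationsRequestProto` directly; it wraps it in the new `AllApplicationsRequestProto` together with the type string. A small sketch of building that wrapper with the generated protobuf classes, assuming the regenerated `ClientRMProtocol` outer class is on the classpath; the "MAPREDUCE" filter value is illustrative:

```java
import org.apache.hadoop.yarn.proto.ClientRMProtocol.AllApplicationsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto;

public class BuildAllApplicationsRequest {
  public static void main(String[] args) {
    // Wrap the (empty) request proto and tag it with a type filter, exactly
    // as ClientRMProtocolPBClientImpl does later in this patch.
    AllApplicationsRequestProto wire = AllApplicationsRequestProto.newBuilder()
        .setAllApplicationsRequest(
            GetAllApplicationsRequestProto.getDefaultInstance())
        .setApplicationType("MAPREDUCE")
        .build();
    System.out.println(wire);
  }
}
```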
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java
index 53303b1..d0b38e2 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java
@@ -146,8 +146,8 @@ ApplicationReport getApplicationReport(ApplicationId appId)
    * @throws YarnException
    * @throws IOException
    */
-  List<ApplicationReport> getApplicationList() throws YarnException,
-      IOException;
+  List<ApplicationReport> getApplicationList(String applicationType)
+      throws YarnException, IOException;
 
   /**
    *
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java
index 48be1a3..2b75998 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java
@@ -191,11 +191,12 @@ public ApplicationReport getApplicationReport(ApplicationId appId)
   }
 
   @Override
-  public List<ApplicationReport> getApplicationList()
+  public List<ApplicationReport> getApplicationList(String applicationType)
       throws YarnException, IOException {
     GetAllApplicationsRequest request =
         Records.newRecord(GetAllApplicationsRequest.class);
-    GetAllApplicationsResponse response = rmClient.getAllApplications(request);
+    GetAllApplicationsResponse response =
+        rmClient.getAllApplications(request, applicationType);
     return response.getApplicationList();
   }
 
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index 6bcd804..5efa281 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -52,7 +52,7 @@ public int run(String[] args) throws Exception {
 
     Options opts = new Options();
     opts.addOption(STATUS_CMD, true, "Prints the status of the application.");
-    opts.addOption(LIST_CMD, false, "Lists all the Applications from RM.");
+    opts.addOption(LIST_CMD, true, "Lists all the Applications from RM.");
     opts.addOption(KILL_CMD, true, "Kills the application.");
     CommandLine cliParser = new GnuParser().parse(opts, args);
 
@@ -64,7 +64,11 @@ public int run(String[] args) throws Exception {
       }
       printApplicationReport(cliParser.getOptionValue(STATUS_CMD));
     } else if (cliParser.hasOption(LIST_CMD)) {
-      listAllApplications();
+      if (args.length != 2) {
+        printUsage(opts);
+        return exitCode;
+      }
+      listAllApplications(cliParser.getOptionValue(LIST_CMD));
     } else if (cliParser.hasOption(KILL_CMD)) {
       if (args.length != 2) {
         printUsage(opts);
@@ -93,9 +97,11 @@ private void printUsage(Options opts) {
    * @throws YarnException
    * @throws IOException
    */
-  private void listAllApplications() throws YarnException, IOException {
+  private void listAllApplications(String applicationType)
+      throws YarnException, IOException {
     PrintWriter writer = new PrintWriter(sysout);
-    List<ApplicationReport> appsReport = client.getApplicationList();
+    List<ApplicationReport> appsReport =
+        client.getApplicationList(applicationType);
 
     writer.println("Total Applications:" + appsReport.size());
     writer.printf(APPLICATIONS_PATTERN, "Application-Id",
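The YARN client surface mirrors the protocol change: `YarnClient#getApplicationList(String)` and `yarn application -list <ApplicationType>` now take a type. A sketch of listing applications through `YarnClientImpl`, assuming a ResourceManager reachable via the default `YarnConfiguration`; the "MAPREDUCE" filter is an example value, and passing `YarnConfiguration.ALL_APPLICATION_TYPE` disables filtering:

```java
import java.util.List;

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.YarnClientImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class ListApplicationsByType {
  public static void main(String[] args) throws Exception {
    YarnClientImpl client = new YarnClientImpl();
    client.init(new YarnConfiguration());
    client.start();
    try {
      // Only reports whose application type matches "MAPREDUCE" come back;
      // the RM-side filter is shown in ClientRMService below.
      List<ApplicationReport> reports = client.getApplicationList("MAPREDUCE");
      for (ApplicationReport report : reports) {
        System.out.println(report.getApplicationId() + "\t"
            + report.getApplicationType() + "\t"
            + report.getYarnApplicationState());
      }
    } finally {
      client.stop();
    }
  }
}
```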
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 49d7867..2fe0f15 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.client.YarnClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Records;
 import org.junit.Before;
 import org.junit.Test;
@@ -115,10 +116,10 @@ public void testGetAllApplications() throws Exception {
         FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN");
     List<ApplicationReport> applicationReports = new ArrayList<ApplicationReport>();
     applicationReports.add(newApplicationReport);
-    when(client.getApplicationList()).thenReturn(applicationReports);
-    int result = cli.run(new String[] { "-list" });
+    when(client.getApplicationList("YARN")).thenReturn(applicationReports);
+    int result = cli.run(new String[] { "-list", "YARN" });
     assertEquals(0, result);
-    verify(client).getApplicationList();
+    verify(client).getApplicationList("YARN");
 
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintWriter pw = new PrintWriter(baos);
@@ -137,6 +138,43 @@ public void testGetAllApplications() throws Exception {
     String appsReportStr = baos.toString("UTF-8");
     Assert.assertEquals(appsReportStr, sysOutStream.toString());
     verify(sysOut, times(1)).write(any(byte[].class), anyInt(), anyInt());
+
+    sysOutStream.reset();
+    ApplicationId applicationId2 = ApplicationId.newInstance(1234, 6);
+    ApplicationReport newApplicationReport2 = ApplicationReport.newInstance(
+        applicationId2, ApplicationAttemptId.newInstance(applicationId2, 2),
+        "user2", "queue2", "appname2", "host2", 125, null,
+        YarnApplicationState.FINISHED, "diagnostics2", "url2", 2, 2,
+        FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.63789f, "NON-YARN");
+    applicationReports.add(newApplicationReport2);
+    when(client.getApplicationList(YarnConfiguration.ALL_APPLICATION_TYPE))
+        .thenReturn(applicationReports);
+    result = cli.run(new String[] { "-list", "ALL" });
+    assertEquals(0, result);
+    verify(client).getApplicationList(YarnConfiguration.ALL_APPLICATION_TYPE);
+    System.out.println(sysOutStream.toString());
+    baos = new ByteArrayOutputStream();
+    pw = new PrintWriter(baos);
+    pw.println("Total Applications:2");
+    pw.print("                Application-Id\t    Application-Name");
+    pw.print("\t    Application-Type");
+    pw.print("\t      User\t     Queue\t             State\t       ");
+    pw.print("Final-State\t       Progress");
+    pw.println("\t                       Tracking-URL");
+    pw.print("         application_1234_0005\t             ");
+    pw.print("appname\t                YARN\t      user\t     ");
+    pw.print("queue\t          FINISHED\t         ");
+    pw.print("SUCCEEDED\t         53.79%");
+    pw.println("\t                                N/A");
+    pw.print("         application_1234_0006\t            ");
+    pw.print("appname2\t            NON-YARN\t     user2\t    ");
+    pw.print("queue2\t          FINISHED\t         ");
+    pw.print("SUCCEEDED\t         63.79%");
+    pw.println("\t                                N/A");
+    pw.close();
+    appsReportStr = baos.toString("UTF-8");
+    Assert.assertEquals(appsReportStr, sysOutStream.toString());
+    verify(sysOut, times(2)).write(any(byte[].class), anyInt(), anyInt());
   }
 
   @Test
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java
index 8394324..56ee8d2 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java
@@ -80,6 +80,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
+import org.apache.hadoop.yarn.proto.ClientRMProtocol.AllApplicationsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto;
@@ -187,13 +188,17 @@ public SubmitApplicationResponse submitApplication(
 
   @Override
   public GetAllApplicationsResponse getAllApplications(
-      GetAllApplicationsRequest request) throws YarnException,
-      IOException {
+      GetAllApplicationsRequest request, String applicationType)
+      throws YarnException, IOException {
     GetAllApplicationsRequestProto requestProto =
         ((GetAllApplicationsRequestPBImpl) request).getProto();
+    AllApplicationsRequestProto allApplicationsRequestProto =
+        AllApplicationsRequestProto.newBuilder()
+            .setAllApplicationsRequest(requestProto)
+            .setApplicationType(applicationType).build();
     try {
       return new GetAllApplicationsResponsePBImpl(proxy.getAllApplications(
-          null, requestProto));
+          null, allApplicationsRequestProto));
     } catch (ServiceException e) {
       RPCUtil.unwrapAndThrowException(e);
       return null;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java
index caad876..c3075f8 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java
@@ -65,6 +65,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl;
 import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.proto.ClientRMProtocol.AllApplicationsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
@@ -169,12 +170,13 @@ public SubmitApplicationResponseProto submitApplication(RpcController arg0,
 
   @Override
   public GetAllApplicationsResponseProto getAllApplications(
-      RpcController controller, GetAllApplicationsRequestProto proto)
+      RpcController controller, AllApplicationsRequestProto proto)
       throws ServiceException {
     GetAllApplicationsRequestPBImpl request =
-        new GetAllApplicationsRequestPBImpl(proto);
+        new GetAllApplicationsRequestPBImpl(proto.getAllApplicationsRequest());
     try {
-      GetAllApplicationsResponse response = real.getAllApplications(request);
+      GetAllApplicationsResponse response =
+          real.getAllApplications(request, proto.getApplicationType());
       return ((GetAllApplicationsResponsePBImpl)response).getProto();
     } catch (YarnException e) {
       throw new ServiceException(e);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index 88bc68b..ec6552f 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -393,7 +393,7 @@ public GetClusterMetricsResponse getClusterMetrics(
 
   @Override
   public GetAllApplicationsResponse getAllApplications(
-      GetAllApplicationsRequest request) throws YarnException {
+      GetAllApplicationsRequest request, String applicationType) throws YarnException {
     UserGroupInformation callerUGI;
     try {
 
@@ -407,7 +407,12 @@ public GetAllApplicationsResponse getAllApplications(
     for (RMApp application : this.rmContext.getRMApps().values()) {
       boolean allowAccess = checkAccess(callerUGI, application.getUser(),
           ApplicationAccessType.VIEW_APP, application.getApplicationId());
-      reports.add(application.createAndGetApplicationReport(allowAccess));
+      ApplicationReport applicationReport =
+          application.createAndGetApplicationReport(allowAccess);
+      if (YarnConfiguration.ALL_APPLICATION_TYPE.equals(applicationType)
+          || applicationReport.getApplicationType().equals(applicationType)) {
+        reports.add(applicationReport);
+      }
     }
 
     GetAllApplicationsResponse response =
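The ResourceManager-side filter above keeps a report when the requested type is the "ALL" sentinel or exactly matches the report's application type (the comparison is case-sensitive). A standalone sketch of that predicate for illustration; the helper class and method names are hypothetical and do not appear in the patch:

```java
/**
 * Standalone illustration of the type check applied in
 * ClientRMService#getAllApplications above; the patch inlines this
 * logic in the loop over RMApps.
 */
public class ApplicationTypeFilter {
  // Mirrors YarnConfiguration.ALL_APPLICATION_TYPE introduced by the patch.
  static final String ALL_APPLICATION_TYPE = "ALL";

  static boolean accepts(String requestedType, String reportType) {
    // "ALL" acts as a wildcard; otherwise require an exact, case-sensitive match.
    return ALL_APPLICATION_TYPE.equals(requestedType)
        || reportType.equals(requestedType);
  }

  public static void main(String[] args) {
    System.out.println(accepts("ALL", "MAPREDUCE"));  // true
    System.out.println(accepts("MAPREDUCE", "YARN")); // false
    System.out.println(accepts("YARN", "YARN"));      // true
  }
}
```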
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java
index ef36858..b05b809 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java
@@ -214,7 +214,8 @@ private void verifyOwnerAccess() throws Exception {
     // List apps as owner
     Assert.assertEquals("App view by owner should list the apps!!", 1,
         rmClient.getAllApplications(
-            recordFactory.newRecordInstance(GetAllApplicationsRequest.class))
+            recordFactory.newRecordInstance(GetAllApplicationsRequest.class),
+            YarnConfiguration.DEFAULT_APPLICATION_TYPE)
             .getApplicationList().size());
 
     // Kill app as owner
@@ -245,7 +246,8 @@ private void verifySuperUserAccess() throws Exception {
     // List apps as superUser
     Assert.assertEquals("App view by super-user should list the apps!!", 2,
         superUserClient.getAllApplications(
-            recordFactory.newRecordInstance(GetAllApplicationsRequest.class))
+            recordFactory.newRecordInstance(GetAllApplicationsRequest.class),
+            YarnConfiguration.DEFAULT_APPLICATION_TYPE)
             .getApplicationList().size());
 
     // Kill app as the superUser
@@ -276,7 +278,8 @@ private void verifyFriendAccess() throws Exception {
     // List apps as friend
     Assert.assertEquals("App view by a friend should list the apps!!", 3,
         friendClient.getAllApplications(
-            recordFactory.newRecordInstance(GetAllApplicationsRequest.class))
+            recordFactory.newRecordInstance(GetAllApplicationsRequest.class),
+            YarnConfiguration.DEFAULT_APPLICATION_TYPE)
             .getApplicationList().size());
 
     // Kill app as the friend
@@ -309,7 +312,8 @@ private void verifyEnemyAccess() throws Exception {
     // List apps as enemy
     List<ApplicationReport> appReports = enemyRmClient
         .getAllApplications(recordFactory
-            .newRecordInstance(GetAllApplicationsRequest.class))
+            .newRecordInstance(GetAllApplicationsRequest.class),
+            YarnConfiguration.DEFAULT_APPLICATION_TYPE)
         .getApplicationList();
     Assert.assertEquals("App view by enemy should list the apps!!", 4,
         appReports.size());